commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
af85d44d9a6f7cf65fe504816bcf4a10ba603d51
|
pdfdocument/utils.py
|
pdfdocument/utils.py
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, **kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
return PDFDocument(response, **kwargs), response
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, pdfdocument=PDFDocument,
**kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
return pdfdocument(response, **kwargs), response
|
Make the PDFDocument class used in pdf_response configurable
|
Make the PDFDocument class used in pdf_response configurable
|
Python
|
bsd-3-clause
|
matthiask/pdfdocument,dongguangming/pdfdocument
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
- def pdf_response(filename, as_attachment=True, **kwargs):
+
+ def pdf_response(filename, as_attachment=True, pdfdocument=PDFDocument,
+ **kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
- return PDFDocument(response, **kwargs), response
+ return pdfdocument(response, **kwargs), response
|
Make the PDFDocument class used in pdf_response configurable
|
## Code Before:
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, **kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
return PDFDocument(response, **kwargs), response
## Instruction:
Make the PDFDocument class used in pdf_response configurable
## Code After:
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, pdfdocument=PDFDocument,
**kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
return pdfdocument(response, **kwargs), response
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
- def pdf_response(filename, as_attachment=True, **kwargs):
+
+ def pdf_response(filename, as_attachment=True, pdfdocument=PDFDocument,
+ **kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
- return PDFDocument(response, **kwargs), response
? ^^^^
+ return pdfdocument(response, **kwargs), response
? ^^^^
|
fc6e3c276ee638fbb4409fa00d470817205f2028
|
lib/awsflow/test/workflow_testing_context.py
|
lib/awsflow/test/workflow_testing_context.py
|
from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
self._context = self.get_context()
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
|
from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
try:
self._context = self.get_context()
except AttributeError:
self._context = None
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
|
Fix context setting on the test context
|
Fix context setting on the test context
|
Python
|
apache-2.0
|
darjus/botoflow,boto/botoflow
|
from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
+ try:
- self._context = self.get_context()
+ self._context = self.get_context()
+ except AttributeError:
+ self._context = None
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
|
Fix context setting on the test context
|
## Code Before:
from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
self._context = self.get_context()
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
## Instruction:
Fix context setting on the test context
## Code After:
from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
try:
self._context = self.get_context()
except AttributeError:
self._context = None
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
|
from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
+ try:
- self._context = self.get_context()
+ self._context = self.get_context()
? ++++
+ except AttributeError:
+ self._context = None
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
|
4a98d2ce95d6a082588e4ccc8e04454c26260ca0
|
helpers.py
|
helpers.py
|
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
|
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list) or isinstance(passed_list, tuple):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
|
Make get_readable_list process tuples, too
|
Make get_readable_list process tuples, too
|
Python
|
agpl-3.0
|
hawkrives/gobbldygook,hawkrives/gobbldygook,hawkrives/gobbldygook
|
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
- if isinstance(passed_list, list):
+ if isinstance(passed_list, list) or isinstance(passed_list, tuple):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
+
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
|
Make get_readable_list process tuples, too
|
## Code Before:
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
## Instruction:
Make get_readable_list process tuples, too
## Code After:
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list) or isinstance(passed_list, tuple):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
|
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
- if isinstance(passed_list, list):
+ if isinstance(passed_list, list) or isinstance(passed_list, tuple):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
+
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
|
755e4c5ca84072d9983de0b1a0e76419cde77f66
|
lib/repo/git_hooks/update.d/02-block_change_top_level_master.py
|
lib/repo/git_hooks/update.d/02-block_change_top_level_master.py
|
import os
import subprocess
import sys
if __name__ == '__main__':
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
# no need to check if old_commit or new_commit are 0, master can't be deleted or created
if ref_name == 'refs/heads/master' and os.environ.get('REMOTE_USER') is not None: # push not coming from MarkUs
# check 1: created/deleted top level files/directories
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout:
print('[MARKUS] Error: creating/deleting top level files and directories is not allowed on master!')
exit(1)
# check 2: modified top level files
changes = subprocess.run(['git', 'diff', '--name-only', old_commit, new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if any(os.sep not in change for change in changes.stdout.splitlines()):
print('[MARKUS] Error: modifying top level files is not allowed on master!')
exit(1)
|
import os
import subprocess
import sys
if __name__ == '__main__':
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
# no need to check if old_commit or new_commit are 0, master can't be deleted or created
if ref_name == 'refs/heads/master' and os.environ.get('REMOTE_USER') is not None: # push not coming from MarkUs
# check 1: created/deleted top level files/directories
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout:
print('[MARKUS] Error: creating/deleting top level files and directories is not allowed on master!')
exit(1)
# check 2: modified top level files
changes = subprocess.run(['git', 'diff', '--name-only', '--no-renames', old_commit, new_commit],
stdout=subprocess.PIPE, universal_newlines=True)
if any(os.sep not in change for change in changes.stdout.splitlines()):
print('[MARKUS] Error: modifying top level files is not allowed on master!')
exit(1)
|
Fix top level hook to not use renames
|
git: Fix top level hook to not use renames
|
Python
|
mit
|
benjaminvialle/Markus,MarkUsProject/Markus,MarkUsProject/Markus,benjaminvialle/Markus,benjaminvialle/Markus,benjaminvialle/Markus,benjaminvialle/Markus,MarkUsProject/Markus,MarkUsProject/Markus,benjaminvialle/Markus,MarkUsProject/Markus,MarkUsProject/Markus,benjaminvialle/Markus,MarkUsProject/Markus,MarkUsProject/Markus
|
import os
import subprocess
import sys
if __name__ == '__main__':
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
# no need to check if old_commit or new_commit are 0, master can't be deleted or created
if ref_name == 'refs/heads/master' and os.environ.get('REMOTE_USER') is not None: # push not coming from MarkUs
# check 1: created/deleted top level files/directories
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout:
print('[MARKUS] Error: creating/deleting top level files and directories is not allowed on master!')
exit(1)
# check 2: modified top level files
- changes = subprocess.run(['git', 'diff', '--name-only', old_commit, new_commit], stdout=subprocess.PIPE,
+ changes = subprocess.run(['git', 'diff', '--name-only', '--no-renames', old_commit, new_commit],
- universal_newlines=True)
+ stdout=subprocess.PIPE, universal_newlines=True)
if any(os.sep not in change for change in changes.stdout.splitlines()):
print('[MARKUS] Error: modifying top level files is not allowed on master!')
exit(1)
|
Fix top level hook to not use renames
|
## Code Before:
import os
import subprocess
import sys
if __name__ == '__main__':
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
# no need to check if old_commit or new_commit are 0, master can't be deleted or created
if ref_name == 'refs/heads/master' and os.environ.get('REMOTE_USER') is not None: # push not coming from MarkUs
# check 1: created/deleted top level files/directories
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout:
print('[MARKUS] Error: creating/deleting top level files and directories is not allowed on master!')
exit(1)
# check 2: modified top level files
changes = subprocess.run(['git', 'diff', '--name-only', old_commit, new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if any(os.sep not in change for change in changes.stdout.splitlines()):
print('[MARKUS] Error: modifying top level files is not allowed on master!')
exit(1)
## Instruction:
Fix top level hook to not use renames
## Code After:
import os
import subprocess
import sys
if __name__ == '__main__':
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
# no need to check if old_commit or new_commit are 0, master can't be deleted or created
if ref_name == 'refs/heads/master' and os.environ.get('REMOTE_USER') is not None: # push not coming from MarkUs
# check 1: created/deleted top level files/directories
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout:
print('[MARKUS] Error: creating/deleting top level files and directories is not allowed on master!')
exit(1)
# check 2: modified top level files
changes = subprocess.run(['git', 'diff', '--name-only', '--no-renames', old_commit, new_commit],
stdout=subprocess.PIPE, universal_newlines=True)
if any(os.sep not in change for change in changes.stdout.splitlines()):
print('[MARKUS] Error: modifying top level files is not allowed on master!')
exit(1)
|
import os
import subprocess
import sys
if __name__ == '__main__':
ref_name = sys.argv[1]
old_commit = sys.argv[2]
new_commit = sys.argv[3]
# no need to check if old_commit or new_commit are 0, master can't be deleted or created
if ref_name == 'refs/heads/master' and os.environ.get('REMOTE_USER') is not None: # push not coming from MarkUs
# check 1: created/deleted top level files/directories
old_ls = subprocess.run(['git', 'ls-tree', '--name-only', old_commit], stdout=subprocess.PIPE,
universal_newlines=True)
new_ls = subprocess.run(['git', 'ls-tree', '--name-only', new_commit], stdout=subprocess.PIPE,
universal_newlines=True)
if old_ls.stdout != new_ls.stdout:
print('[MARKUS] Error: creating/deleting top level files and directories is not allowed on master!')
exit(1)
# check 2: modified top level files
- changes = subprocess.run(['git', 'diff', '--name-only', old_commit, new_commit], stdout=subprocess.PIPE,
? ------------------------
+ changes = subprocess.run(['git', 'diff', '--name-only', '--no-renames', old_commit, new_commit],
? ++++++++++++++++
- universal_newlines=True)
+ stdout=subprocess.PIPE, universal_newlines=True)
? ++++++++++++++++++++++++
if any(os.sep not in change for change in changes.stdout.splitlines()):
print('[MARKUS] Error: modifying top level files is not allowed on master!')
exit(1)
|
9f42cd231375475d27c6fe298ec862065c34f8ca
|
armstrong/core/arm_sections/views.py
|
armstrong/core/arm_sections/views.py
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
Refactor SimpleSectionView to inherit DetailView
|
Refactor SimpleSectionView to inherit DetailView
|
Python
|
apache-2.0
|
armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
- from django.views.generic import TemplateView
+ from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
- class SimpleSectionView(TemplateView):
+ class SimpleSectionView(DetailView):
- well_title = None
+ context_object_name = 'section'
+ model = Section
+ def get_object(self, queryset=None):
+ return get_object_or_404(self.get_queryset(),
+ full_slug=self.kwargs['full_slug'])
- def get_section(self):
- return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
-
- def get_context_data(self, **kwargs):
- context = super(SimpleSectionView, self).get_context_data(**kwargs)
- context["section"] = self.get_section()
- return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
Refactor SimpleSectionView to inherit DetailView
|
## Code Before:
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
## Instruction:
Refactor SimpleSectionView to inherit DetailView
## Code After:
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
- from django.views.generic import TemplateView
? ^ ^^ ---
+ from django.views.generic import DetailView
? ^ ^^^
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
- class SimpleSectionView(TemplateView):
? ^ ^^ ---
+ class SimpleSectionView(DetailView):
? ^ ^^^
- well_title = None
+ context_object_name = 'section'
+ model = Section
+ def get_object(self, queryset=None):
+ return get_object_or_404(self.get_queryset(),
+ full_slug=self.kwargs['full_slug'])
- def get_section(self):
- return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
-
- def get_context_data(self, **kwargs):
- context = super(SimpleSectionView, self).get_context_data(**kwargs)
- context["section"] = self.get_section()
- return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
7b3267b2bae436e0580e2a229a64bd8d6a04bc1f
|
manila_ui/local/local_settings.d/_90_manila_shares.py
|
manila_ui/local/local_settings.d/_90_manila_shares.py
|
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
from django.conf import settings
settings.POLICY_FILES.update({
'share': 'manila_policy.json',
})
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
Define the default policy file
|
Define the default policy file
This change ensures that the default policy file for Manila API access
is defined by default, so that operators can deploy their own policy
more easily.
Change-Id: Ie890766ea2a274791393304cdfe532e024171195
|
Python
|
apache-2.0
|
openstack/manila-ui,openstack/manila-ui,openstack/manila-ui
|
+ from django.conf import settings
+
+
+ settings.POLICY_FILES.update({
+ 'share': 'manila_policy.json',
+ })
+
+ # The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
+ # the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
Define the default policy file
|
## Code Before:
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
## Instruction:
Define the default policy file
## Code After:
from django.conf import settings
settings.POLICY_FILES.update({
'share': 'manila_policy.json',
})
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
+ from django.conf import settings
+
+
+ settings.POLICY_FILES.update({
+ 'share': 'manila_policy.json',
+ })
+
+ # The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
+ # the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
|
721f18da4d38ac76171165596bc11e2572c60204
|
algebra.py
|
algebra.py
|
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
Fix bug where vector calculations returned Ints only
|
Fix bug where vector calculations returned Ints only
|
Python
|
mit
|
supermitch/clipycube
|
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
- return (int(x),
+ return (round(x, 1),
- int(math.cos(theta) * y - math.sin(theta) * z),
+ round(math.cos(theta) * y - math.sin(theta) * z, 1),
- int(math.sin(theta) * y + math.cos(theta) * z))
+ round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
- return (int(math.cos(theta) * x + math.sin(theta) * z),
+ return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
- int(y),
+ round(y, 1),
- int(-math.sin(theta) * x + math.cos(theta) * z))
+ round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
- return (int(math.cos(theta) * x - math.sin(theta) * y),
+ return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
- int(math.sin(theta) * x + math.cos(theta) * y),
+ round(math.sin(theta) * x + math.cos(theta) * y, 1),
- int(z))
+ round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
Fix bug where vector calculations returned Ints only
|
## Code Before:
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (int(x),
int(math.cos(theta) * y - math.sin(theta) * z),
int(math.sin(theta) * y + math.cos(theta) * z))
def Ry(x, y, z, theta):
return (int(math.cos(theta) * x + math.sin(theta) * z),
int(y),
int(-math.sin(theta) * x + math.cos(theta) * z))
def Rz(x, y, z, theta):
return (int(math.cos(theta) * x - math.sin(theta) * y),
int(math.sin(theta) * x + math.cos(theta) * y),
int(z))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
## Instruction:
Fix bug where vector calculations returned Ints only
## Code After:
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
return (round(x, 1),
round(math.cos(theta) * y - math.sin(theta) * z, 1),
round(math.sin(theta) * y + math.cos(theta) * z, 1))
def Ry(x, y, z, theta):
return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
round(y, 1),
round(-math.sin(theta) * x + math.cos(theta) * z, 1))
def Rz(x, y, z, theta):
return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
round(math.sin(theta) * x + math.cos(theta) * y, 1),
round(z, 1))
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
import math
def rotation(point, axis, sign=1):
"""
Rotate a point (or vector) about the origin in 3D space.
"""
def Rx(x, y, z, theta):
- return (int(x),
? ^ ^
+ return (round(x, 1),
? ^^^ ^ +++
- int(math.cos(theta) * y - math.sin(theta) * z),
? ^ ^
+ round(math.cos(theta) * y - math.sin(theta) * z, 1),
? ^^^ ^ +++
- int(math.sin(theta) * y + math.cos(theta) * z))
? ^ ^
+ round(math.sin(theta) * y + math.cos(theta) * z, 1))
? ^^^ ^ +++
def Ry(x, y, z, theta):
- return (int(math.cos(theta) * x + math.sin(theta) * z),
? ^ ^
+ return (round(math.cos(theta) * x + math.sin(theta) * z, 1),
? ^^^ ^ +++
- int(y),
? ^ ^
+ round(y, 1),
? ^^^ ^ +++
- int(-math.sin(theta) * x + math.cos(theta) * z))
? ^ ^
+ round(-math.sin(theta) * x + math.cos(theta) * z, 1))
? ^^^ ^ +++
def Rz(x, y, z, theta):
- return (int(math.cos(theta) * x - math.sin(theta) * y),
? ^ ^
+ return (round(math.cos(theta) * x - math.sin(theta) * y, 1),
? ^^^ ^ +++
- int(math.sin(theta) * x + math.cos(theta) * y),
? ^ ^
+ round(math.sin(theta) * x + math.cos(theta) * y, 1),
? ^^^ ^ +++
- int(z))
? ^ ^
+ round(z, 1))
? ^^^ ^ +++
R = {'x': Rx, 'y': Ry, 'z': Rz}[axis] # Select a rotation matrix
theta = sign * math.pi / 2 # Always 90 degrees
x, y, z = point # Assumes 3D point or vector
return R(x, y, z, theta) # Calculate our new normal vector
|
0da7c7931f7abc8775087aab5054cfea63120f60
|
scripts/dumpcmaps.py
|
scripts/dumpcmaps.py
|
import numpy as np
import Image
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
|
import numpy as np
import Image
import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
|
Add all the J* colormaps
|
Add all the J* colormaps
|
Python
|
bsd-2-clause
|
gallantlab/pycortex,smerdis/pycortex,smerdis/pycortex,CVML/pycortex,gallantlab/pycortex,CVML/pycortex,smerdis/pycortex,CVML/pycortex,CVML/pycortex,CVML/pycortex,gallantlab/pycortex,smerdis/pycortex,smerdis/pycortex,gallantlab/pycortex,gallantlab/pycortex
|
import numpy as np
import Image
+ import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
|
Add all the J* colormaps
|
## Code Before:
import numpy as np
import Image
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
## Instruction:
Add all the J* colormaps
## Code After:
import numpy as np
import Image
import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
|
import numpy as np
import Image
+ import scipy.io as sio
def makeImage(cmap, fname):
cmarr = (cmap*255).astype(np.uint8)
im = Image.fromarray(cmarr[np.newaxis])
im.save(fname)
def cmList(additional):
cmaps = {}
values = np.linspace(0, 1, 256)
from matplotlib import cm, colors
for cmname in dir(cm):
cmap = getattr(cm, cmname)
if isinstance(cmap, colors.Colormap):
cmaps[cmname] = cmap(values)
for name, cmap in additional.items():
cmaps[name] = colors.LinearSegmentedColormap.from_list(name, cmap)(values)
return cmaps
if __name__ == "__main__":
import os
import sys
path = sys.argv[1]
matfile = sio.loadmat("/auto/k2/share/mritools_store/colormaps.mat")
del matfile['__globals__']
del matfile['__header__']
del matfile['__version__']
for name, cm in cmList(matfile).items():
fname = os.path.join(path, "%s.png"%name)
makeImage(cm, fname)
|
92676c0e84df0e1c0d14766b339410d09c60b5fb
|
froide/helper/forms.py
|
froide/helper/forms.py
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
Make empty tag form valid
|
Make empty tag form valid
|
Python
|
mit
|
stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
+ required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
Make empty tag form valid
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
## Instruction:
Make empty tag form valid
## Code After:
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
+ required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
e45f394c61620db13bae579a29043dfdd6ae2d0f
|
SLA_bot/alertfeed.py
|
SLA_bot/alertfeed.py
|
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
return '```fix\n{}\n```'.format(text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
code_color = 'fix' if len(lines) >= 10 else ''
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
return '```{}\n{}\n```'.format(code_color, text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
Remove text coloring in AlertFeed if it seems like scheduled text
|
Remove text coloring in AlertFeed if it seems like scheduled text
|
Python
|
mit
|
EsqWiggles/SLA-bot,EsqWiggles/SLA-bot
|
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
+ code_color = 'fix' if len(lines) >= 10 else ''
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
- return '```fix\n{}\n```'.format(text)
+ return '```{}\n{}\n```'.format(code_color, text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
Remove text coloring in AlertFeed if it seems like scheduled text
|
## Code Before:
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
return '```fix\n{}\n```'.format(text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
## Instruction:
Remove text coloring in AlertFeed if it seems like scheduled text
## Code After:
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
code_color = 'fix' if len(lines) >= 10 else ''
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
return '```{}\n{}\n```'.format(code_color, text)
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
import asyncio
import json
import aiohttp
import SLA_bot.config as cf
class AlertFeed:
source_url = 'http://pso2emq.flyergo.eu/api/v2/'
async def download(url):
try:
async with aiohttp.get(url) as response:
return await response.json()
except json.decoder.JSONDecodeError:
pass
def parse_data(data):
latest_alert = data[0]['text']
lines = latest_alert.splitlines()
+ code_color = 'fix' if len(lines) >= 10 else ''
header = '-' * len(lines[0])
lines.insert(1, header)
text = '\n'.join(lines)
- return '```fix\n{}\n```'.format(text)
? ^^^
+ return '```{}\n{}\n```'.format(code_color, text)
? ^^ ++++++++++++
async def fetch():
header = cf.get('PSO2 Feed', 'header')
raw_data = await AlertFeed.download(AlertFeed.source_url)
return '** **\n' + header + '\n' + AlertFeed.parse_data(raw_data)
|
7729c90679a74f268d7b0fd88c954fb583830794
|
parser.py
|
parser.py
|
import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
def parse(self, url):
content = webquery.urlcontent(url)
root = etree.HTML(content, base_url=url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
data['url'] = url
return data
|
import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
def canonical_url(self, url):
"""By overriding this method canonical url can be used"""
return url
def parse(self, url):
canonical_url = self.canonical_url(url)
content = webquery.urlcontent(canonical_url)
root = etree.HTML(content, base_url=canonical_url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
data['url'] = canonical_url
return data
|
Add ability to customize URL
|
Add ability to customize URL
|
Python
|
apache-2.0
|
shiplu/webxpath
|
import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
+ def canonical_url(self, url):
+ """By overriding this method canonical url can be used"""
+ return url
+
def parse(self, url):
+ canonical_url = self.canonical_url(url)
- content = webquery.urlcontent(url)
+ content = webquery.urlcontent(canonical_url)
- root = etree.HTML(content, base_url=url)
+ root = etree.HTML(content, base_url=canonical_url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
- data['url'] = url
+ data['url'] = canonical_url
return data
|
Add ability to customize URL
|
## Code Before:
import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
def parse(self, url):
content = webquery.urlcontent(url)
root = etree.HTML(content, base_url=url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
data['url'] = url
return data
## Instruction:
Add ability to customize URL
## Code After:
import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
def canonical_url(self, url):
"""By overriding this method canonical url can be used"""
return url
def parse(self, url):
canonical_url = self.canonical_url(url)
content = webquery.urlcontent(canonical_url)
root = etree.HTML(content, base_url=canonical_url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
data['url'] = canonical_url
return data
|
import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
+ def canonical_url(self, url):
+ """By overriding this method canonical url can be used"""
+ return url
+
def parse(self, url):
+ canonical_url = self.canonical_url(url)
- content = webquery.urlcontent(url)
+ content = webquery.urlcontent(canonical_url)
? ++++++++++
- root = etree.HTML(content, base_url=url)
+ root = etree.HTML(content, base_url=canonical_url)
? ++++++++++
data = {name: expr.parse(root) for name, expr in self.fields.items()}
- data['url'] = url
+ data['url'] = canonical_url
? ++++++++++
return data
|
ce3249dea725d40d5e0916b344cdde53ab6d53dc
|
src/satosa/micro_services/processors/scope_extractor_processor.py
|
src/satosa/micro_services/processors/scope_extractor_processor.py
|
from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning
from .base_processor import BaseProcessor
CONFIG_KEY_MAPPEDATTRIBUTE = 'mapped_attribute'
CONFIG_DEFAULT_MAPPEDATTRIBUTE = ''
class ScopeExtractorProcessor(BaseProcessor):
"""
Extracts the scope from a scoped attribute and maps that to
another attribute
Example configuration:
module: satosa.micro_services.attribute_processor.AttributeProcessor
name: AttributeProcessor
config:
process:
- attribute: scoped_affiliation
processors:
- name: ScopeExtractorProcessor
module: satosa.micro_services.processors.scope_extractor_processor
mapped_attribute: domain
"""
def process(self, internal_data, attribute, **kwargs):
mapped_attribute = kwargs.get(CONFIG_KEY_MAPPEDATTRIBUTE, CONFIG_DEFAULT_MAPPEDATTRIBUTE)
if mapped_attribute is None or mapped_attribute == '':
raise AttributeProcessorError("The mapped_attribute needs to be set")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not values:
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute))
if not any('@' in val for val in values):
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute))
for value in values:
if '@' in value:
scope = value.split('@')[1]
attributes[mapped_attribute] = [scope]
break
|
from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning
from .base_processor import BaseProcessor
CONFIG_KEY_MAPPEDATTRIBUTE = 'mapped_attribute'
CONFIG_DEFAULT_MAPPEDATTRIBUTE = ''
class ScopeExtractorProcessor(BaseProcessor):
"""
Extracts the scope from a scoped attribute and maps that to
another attribute
Example configuration:
module: satosa.micro_services.attribute_processor.AttributeProcessor
name: AttributeProcessor
config:
process:
- attribute: scoped_affiliation
processors:
- name: ScopeExtractorProcessor
module: satosa.micro_services.processors.scope_extractor_processor
mapped_attribute: domain
"""
def process(self, internal_data, attribute, **kwargs):
mapped_attribute = kwargs.get(CONFIG_KEY_MAPPEDATTRIBUTE, CONFIG_DEFAULT_MAPPEDATTRIBUTE)
if mapped_attribute is None or mapped_attribute == '':
raise AttributeProcessorError("The mapped_attribute needs to be set")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not values:
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute))
if not isinstance(values, list):
values = [values]
if not any('@' in val for val in values):
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute))
for value in values:
if '@' in value:
scope = value.split('@')[1]
attributes[mapped_attribute] = [scope]
break
|
Make the ScopeExtractorProcessor usable for the Primary Identifier
|
Make the ScopeExtractorProcessor usable for the Primary Identifier
This patch adds support to use the ScopeExtractorProcessor on the Primary
Identifiert which is, in contrast to the other values, a string.
Closes #348
|
Python
|
apache-2.0
|
SUNET/SATOSA,SUNET/SATOSA,its-dirg/SATOSA
|
from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning
from .base_processor import BaseProcessor
CONFIG_KEY_MAPPEDATTRIBUTE = 'mapped_attribute'
CONFIG_DEFAULT_MAPPEDATTRIBUTE = ''
class ScopeExtractorProcessor(BaseProcessor):
"""
Extracts the scope from a scoped attribute and maps that to
another attribute
Example configuration:
module: satosa.micro_services.attribute_processor.AttributeProcessor
name: AttributeProcessor
config:
process:
- attribute: scoped_affiliation
processors:
- name: ScopeExtractorProcessor
module: satosa.micro_services.processors.scope_extractor_processor
mapped_attribute: domain
"""
def process(self, internal_data, attribute, **kwargs):
mapped_attribute = kwargs.get(CONFIG_KEY_MAPPEDATTRIBUTE, CONFIG_DEFAULT_MAPPEDATTRIBUTE)
if mapped_attribute is None or mapped_attribute == '':
raise AttributeProcessorError("The mapped_attribute needs to be set")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not values:
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute))
+ if not isinstance(values, list):
+ values = [values]
if not any('@' in val for val in values):
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute))
for value in values:
if '@' in value:
scope = value.split('@')[1]
attributes[mapped_attribute] = [scope]
break
|
Make the ScopeExtractorProcessor usable for the Primary Identifier
|
## Code Before:
from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning
from .base_processor import BaseProcessor
CONFIG_KEY_MAPPEDATTRIBUTE = 'mapped_attribute'
CONFIG_DEFAULT_MAPPEDATTRIBUTE = ''
class ScopeExtractorProcessor(BaseProcessor):
"""
Extracts the scope from a scoped attribute and maps that to
another attribute
Example configuration:
module: satosa.micro_services.attribute_processor.AttributeProcessor
name: AttributeProcessor
config:
process:
- attribute: scoped_affiliation
processors:
- name: ScopeExtractorProcessor
module: satosa.micro_services.processors.scope_extractor_processor
mapped_attribute: domain
"""
def process(self, internal_data, attribute, **kwargs):
mapped_attribute = kwargs.get(CONFIG_KEY_MAPPEDATTRIBUTE, CONFIG_DEFAULT_MAPPEDATTRIBUTE)
if mapped_attribute is None or mapped_attribute == '':
raise AttributeProcessorError("The mapped_attribute needs to be set")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not values:
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute))
if not any('@' in val for val in values):
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute))
for value in values:
if '@' in value:
scope = value.split('@')[1]
attributes[mapped_attribute] = [scope]
break
## Instruction:
Make the ScopeExtractorProcessor usable for the Primary Identifier
## Code After:
from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning
from .base_processor import BaseProcessor
CONFIG_KEY_MAPPEDATTRIBUTE = 'mapped_attribute'
CONFIG_DEFAULT_MAPPEDATTRIBUTE = ''
class ScopeExtractorProcessor(BaseProcessor):
"""
Extracts the scope from a scoped attribute and maps that to
another attribute
Example configuration:
module: satosa.micro_services.attribute_processor.AttributeProcessor
name: AttributeProcessor
config:
process:
- attribute: scoped_affiliation
processors:
- name: ScopeExtractorProcessor
module: satosa.micro_services.processors.scope_extractor_processor
mapped_attribute: domain
"""
def process(self, internal_data, attribute, **kwargs):
mapped_attribute = kwargs.get(CONFIG_KEY_MAPPEDATTRIBUTE, CONFIG_DEFAULT_MAPPEDATTRIBUTE)
if mapped_attribute is None or mapped_attribute == '':
raise AttributeProcessorError("The mapped_attribute needs to be set")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not values:
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute))
if not isinstance(values, list):
values = [values]
if not any('@' in val for val in values):
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute))
for value in values:
if '@' in value:
scope = value.split('@')[1]
attributes[mapped_attribute] = [scope]
break
|
from ..attribute_processor import AttributeProcessorError, AttributeProcessorWarning
from .base_processor import BaseProcessor
CONFIG_KEY_MAPPEDATTRIBUTE = 'mapped_attribute'
CONFIG_DEFAULT_MAPPEDATTRIBUTE = ''
class ScopeExtractorProcessor(BaseProcessor):
"""
Extracts the scope from a scoped attribute and maps that to
another attribute
Example configuration:
module: satosa.micro_services.attribute_processor.AttributeProcessor
name: AttributeProcessor
config:
process:
- attribute: scoped_affiliation
processors:
- name: ScopeExtractorProcessor
module: satosa.micro_services.processors.scope_extractor_processor
mapped_attribute: domain
"""
def process(self, internal_data, attribute, **kwargs):
mapped_attribute = kwargs.get(CONFIG_KEY_MAPPEDATTRIBUTE, CONFIG_DEFAULT_MAPPEDATTRIBUTE)
if mapped_attribute is None or mapped_attribute == '':
raise AttributeProcessorError("The mapped_attribute needs to be set")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not values:
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it has no values".format(attribute))
+ if not isinstance(values, list):
+ values = [values]
if not any('@' in val for val in values):
raise AttributeProcessorWarning("Cannot apply scope_extractor to {}, it's values are not scoped".format(attribute))
for value in values:
if '@' in value:
scope = value.split('@')[1]
attributes[mapped_attribute] = [scope]
break
|
bd18f52c2ee41bbc9c33a3b98fdac1ce2ea18ea7
|
rest/urls.py
|
rest/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/[0-9a-fA-F\-]+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
Revert "Handle second service UUID better."
|
Revert "Handle second service UUID better."
Realized I actually made the url parsing worse, this isn't what we wanted.
|
Python
|
apache-2.0
|
CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project,CMPUT404W17T06/CMPUT404-project
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
- r'(?P<other>[\w\-\.]+(:\d{2,5})?(/[0-9a-fA-F\-]+)*/)$',
+ r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
Revert "Handle second service UUID better."
|
## Code Before:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/[0-9a-fA-F\-]+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
## Instruction:
Revert "Handle second service UUID better."
## Code After:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^posts/$', views.PostsView.as_view(), name='posts'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/$', views.PostView.as_view(),
name='post'),
url(r'^posts/(?P<pid>[0-9a-fA-F\-]+)/comments/$',
views.CommentView.as_view(), name='comments'),
url(r'^author/posts/$', views.PostsView.as_view(), name='authorpost'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/$', views.AuthorView.as_view(),
name='author'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/$',
views.AuthorFriendsView.as_view(), name='friends'),
url(r'^friendrequest/$', views.FriendRequestView.as_view(),
name='friendrequest'),
url(r'^author/(?P<aid>[0-9a-fA-F\-]+)/friends/'
- r'(?P<other>[\w\-\.]+(:\d{2,5})?(/[0-9a-fA-F\-]+)*/)$',
? ---------- ^^
+ r'(?P<other>[\w\-\.]+(:\d{2,5})?(/\w+)*/)$',
? ^
views.AuthorIsFriendsView.as_view(), name='isfriends')
]
|
4b863a659e36b1fa9887847e9dbb133b1852cf9b
|
examples/miniapps/bundles/run.py
|
examples/miniapps/bundles/run.py
|
"""Run 'Bundles' example application."""
import sqlite3
import boto3
from dependency_injector import containers
from dependency_injector import providers
from bundles.users import Users
from bundles.photos import Photos
class Core(containers.DeclarativeContainer):
"""Core container."""
config = providers.Configuration('config')
sqlite = providers.Singleton(sqlite3.connect, config.database.dsn)
s3 = providers.Singleton(
boto3.client, 's3',
aws_access_key_id=config.aws.access_key_id,
aws_secret_access_key=config.aws.secret_access_key)
if __name__ == '__main__':
# Initializing containers
core = Core()
core.config.update({'database': {'dsn': ':memory:'},
'aws': {'access_key_id': 'KEY',
'secret_access_key': 'SECRET'}})
users = Users(database=core.sqlite)
photos = Photos(database=core.sqlite, file_storage=core.s3)
# Fetching few users
user_repository = users.user_repository()
user1 = user_repository.get(id=1)
user2 = user_repository.get(id=2)
# Making some checks
assert user1.id == 1
assert user2.id == 2
assert user_repository.db is core.sqlite()
|
"""Run 'Bundles' example application."""
import sqlite3
import boto3
from dependency_injector import containers
from dependency_injector import providers
from bundles.users import Users
from bundles.photos import Photos
class Core(containers.DeclarativeContainer):
"""Core container."""
config = providers.Configuration('config')
sqlite = providers.Singleton(sqlite3.connect, config.database.dsn)
s3 = providers.Singleton(
boto3.client, 's3',
aws_access_key_id=config.aws.access_key_id,
aws_secret_access_key=config.aws.secret_access_key)
if __name__ == '__main__':
# Initializing containers
core = Core(config={'database': {'dsn': ':memory:'},
'aws': {'access_key_id': 'KEY',
'secret_access_key': 'SECRET'}})
users = Users(database=core.sqlite)
photos = Photos(database=core.sqlite, file_storage=core.s3)
# Fetching few users
user_repository = users.user_repository()
user1 = user_repository.get(id=1)
user2 = user_repository.get(id=2)
# Making some checks
assert user1.id == 1
assert user2.id == 2
assert user_repository.db is core.sqlite()
|
Update bundles example after configuration provider refactoring
|
Update bundles example after configuration provider refactoring
|
Python
|
bsd-3-clause
|
ets-labs/dependency_injector,ets-labs/python-dependency-injector,rmk135/objects,rmk135/dependency_injector
|
"""Run 'Bundles' example application."""
import sqlite3
import boto3
from dependency_injector import containers
from dependency_injector import providers
from bundles.users import Users
from bundles.photos import Photos
class Core(containers.DeclarativeContainer):
"""Core container."""
config = providers.Configuration('config')
sqlite = providers.Singleton(sqlite3.connect, config.database.dsn)
s3 = providers.Singleton(
boto3.client, 's3',
aws_access_key_id=config.aws.access_key_id,
aws_secret_access_key=config.aws.secret_access_key)
if __name__ == '__main__':
# Initializing containers
- core = Core()
- core.config.update({'database': {'dsn': ':memory:'},
+ core = Core(config={'database': {'dsn': ':memory:'},
'aws': {'access_key_id': 'KEY',
'secret_access_key': 'SECRET'}})
users = Users(database=core.sqlite)
photos = Photos(database=core.sqlite, file_storage=core.s3)
# Fetching few users
user_repository = users.user_repository()
user1 = user_repository.get(id=1)
user2 = user_repository.get(id=2)
# Making some checks
assert user1.id == 1
assert user2.id == 2
assert user_repository.db is core.sqlite()
|
Update bundles example after configuration provider refactoring
|
## Code Before:
"""Run 'Bundles' example application."""
import sqlite3
import boto3
from dependency_injector import containers
from dependency_injector import providers
from bundles.users import Users
from bundles.photos import Photos
class Core(containers.DeclarativeContainer):
"""Core container."""
config = providers.Configuration('config')
sqlite = providers.Singleton(sqlite3.connect, config.database.dsn)
s3 = providers.Singleton(
boto3.client, 's3',
aws_access_key_id=config.aws.access_key_id,
aws_secret_access_key=config.aws.secret_access_key)
if __name__ == '__main__':
# Initializing containers
core = Core()
core.config.update({'database': {'dsn': ':memory:'},
'aws': {'access_key_id': 'KEY',
'secret_access_key': 'SECRET'}})
users = Users(database=core.sqlite)
photos = Photos(database=core.sqlite, file_storage=core.s3)
# Fetching few users
user_repository = users.user_repository()
user1 = user_repository.get(id=1)
user2 = user_repository.get(id=2)
# Making some checks
assert user1.id == 1
assert user2.id == 2
assert user_repository.db is core.sqlite()
## Instruction:
Update bundles example after configuration provider refactoring
## Code After:
"""Run 'Bundles' example application."""
import sqlite3
import boto3
from dependency_injector import containers
from dependency_injector import providers
from bundles.users import Users
from bundles.photos import Photos
class Core(containers.DeclarativeContainer):
"""Core container."""
config = providers.Configuration('config')
sqlite = providers.Singleton(sqlite3.connect, config.database.dsn)
s3 = providers.Singleton(
boto3.client, 's3',
aws_access_key_id=config.aws.access_key_id,
aws_secret_access_key=config.aws.secret_access_key)
if __name__ == '__main__':
# Initializing containers
core = Core(config={'database': {'dsn': ':memory:'},
'aws': {'access_key_id': 'KEY',
'secret_access_key': 'SECRET'}})
users = Users(database=core.sqlite)
photos = Photos(database=core.sqlite, file_storage=core.s3)
# Fetching few users
user_repository = users.user_repository()
user1 = user_repository.get(id=1)
user2 = user_repository.get(id=2)
# Making some checks
assert user1.id == 1
assert user2.id == 2
assert user_repository.db is core.sqlite()
|
"""Run 'Bundles' example application."""
import sqlite3
import boto3
from dependency_injector import containers
from dependency_injector import providers
from bundles.users import Users
from bundles.photos import Photos
class Core(containers.DeclarativeContainer):
"""Core container."""
config = providers.Configuration('config')
sqlite = providers.Singleton(sqlite3.connect, config.database.dsn)
s3 = providers.Singleton(
boto3.client, 's3',
aws_access_key_id=config.aws.access_key_id,
aws_secret_access_key=config.aws.secret_access_key)
if __name__ == '__main__':
# Initializing containers
- core = Core()
- core.config.update({'database': {'dsn': ':memory:'},
? ^ ^^^^^^^^
+ core = Core(config={'database': {'dsn': ':memory:'},
? ^^^^^^^^ ^
'aws': {'access_key_id': 'KEY',
'secret_access_key': 'SECRET'}})
users = Users(database=core.sqlite)
photos = Photos(database=core.sqlite, file_storage=core.s3)
# Fetching few users
user_repository = users.user_repository()
user1 = user_repository.get(id=1)
user2 = user_repository.get(id=2)
# Making some checks
assert user1.id == 1
assert user2.id == 2
assert user_repository.db is core.sqlite()
|
e637e5f53990709ed654b661465685ad9d05a182
|
api/spawner/templates/constants.py
|
api/spawner/templates/constants.py
|
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
Update cluster config map key format
|
Update cluster config map key format
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
- CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
+ CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
Update cluster config map key format
|
## Code Before:
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
## Instruction:
Update cluster config map key format
## Code After:
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
- CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
? ------------------
+ CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
|
86fa8271b5788aadcbbde3decbcd413b9d22871c
|
util/namespace.py
|
util/namespace.py
|
from _compat import *
class Namespace(object):
"""
Backport of SimpleNamespace() class added in Python 3.3
"""
__slots__ = '__doc__', '__dict__', '__weakref__'
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
__hash__ = None
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
|
from _compat import *
class Namespace(object):
"""
Backport of SimpleNamespace() class added in Python 3.3
"""
__slots__ = '__doc__', '__dict__', '__weakref__'
def __init__(self, **kwargs):
super(Namespace, self).__init__()
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
__hash__ = None
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
|
Call super __init__ from Namespace.__init__
|
util: Call super __init__ from Namespace.__init__
|
Python
|
unknown
|
embox/mybuild,abusalimov/mybuild,embox/mybuild,abusalimov/mybuild
|
from _compat import *
class Namespace(object):
"""
Backport of SimpleNamespace() class added in Python 3.3
"""
__slots__ = '__doc__', '__dict__', '__weakref__'
def __init__(self, **kwargs):
+ super(Namespace, self).__init__()
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
__hash__ = None
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
|
Call super __init__ from Namespace.__init__
|
## Code Before:
from _compat import *
class Namespace(object):
"""
Backport of SimpleNamespace() class added in Python 3.3
"""
__slots__ = '__doc__', '__dict__', '__weakref__'
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
__hash__ = None
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
## Instruction:
Call super __init__ from Namespace.__init__
## Code After:
from _compat import *
class Namespace(object):
"""
Backport of SimpleNamespace() class added in Python 3.3
"""
__slots__ = '__doc__', '__dict__', '__weakref__'
def __init__(self, **kwargs):
super(Namespace, self).__init__()
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
__hash__ = None
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
|
from _compat import *
class Namespace(object):
"""
Backport of SimpleNamespace() class added in Python 3.3
"""
__slots__ = '__doc__', '__dict__', '__weakref__'
def __init__(self, **kwargs):
+ super(Namespace, self).__init__()
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
__hash__ = None
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
keys = sorted(self.__dict__)
items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
return "{}({})".format(type(self).__name__, ", ".join(items))
|
584891ce58c3e979a5d6871ba7a6ff0a9e01d780
|
routes/student_vote.py
|
routes/student_vote.py
|
from aiohttp import web
from db_helper import get_project_id, get_most_recent_group, get_user_id
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
if project.group is not get_most_recent_group(session):
return web.Response(status=403, text="Cannot join legacy projects")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
|
from aiohttp import web
from db_helper import get_project_id, get_user_id, can_choose_project
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
if not can_choose_project(session, cookies, project):
return web.Response(status=403, text="You cannot choose this project")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
|
Check if student can choose a project before allowing them to join it
|
Check if student can choose a project before allowing them to join it
|
Python
|
agpl-3.0
|
wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp
|
from aiohttp import web
- from db_helper import get_project_id, get_most_recent_group, get_user_id
+ from db_helper import get_project_id, get_user_id, can_choose_project
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
- if project.group is not get_most_recent_group(session):
+ if not can_choose_project(session, cookies, project):
- return web.Response(status=403, text="Cannot join legacy projects")
+ return web.Response(status=403, text="You cannot choose this project")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
|
Check if student can choose a project before allowing them to join it
|
## Code Before:
from aiohttp import web
from db_helper import get_project_id, get_most_recent_group, get_user_id
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
if project.group is not get_most_recent_group(session):
return web.Response(status=403, text="Cannot join legacy projects")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
## Instruction:
Check if student can choose a project before allowing them to join it
## Code After:
from aiohttp import web
from db_helper import get_project_id, get_user_id, can_choose_project
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
if not can_choose_project(session, cookies, project):
return web.Response(status=403, text="You cannot choose this project")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
|
from aiohttp import web
- from db_helper import get_project_id, get_most_recent_group, get_user_id
+ from db_helper import get_project_id, get_user_id, can_choose_project
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
- if project.group is not get_most_recent_group(session):
+ if not can_choose_project(session, cookies, project):
- return web.Response(status=403, text="Cannot join legacy projects")
? ^ ^ ^^^^^^^^ -
+ return web.Response(status=403, text="You cannot choose this project")
? ^^^^^ ^^ ++++++ ^
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
|
0524817b152b4e3211d5d8101c661a54578e5888
|
dmoj/checkers/standard.py
|
dmoj/checkers/standard.py
|
def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
|
from ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
|
Remove untested checker code path
|
Remove untested checker code path
|
Python
|
agpl-3.0
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
+ from ._checker import standard
- def check(process_output, judge_output, **kwargs):
- from six.moves import zip
- process_lines = list(filter(None, process_output.split(b'\n')))
- judge_lines = list(filter(None, judge_output.split(b'\n')))
- if len(process_lines) != len(judge_lines):
- return False
- for process_line, judge_line in zip(process_lines, judge_lines):
- if process_line.split() != judge_line.split():
- return False
- return True
- try:
- from ._checker import standard
- except ImportError as e:
- pass
- else:
- def check(process_output, judge_output, _checker=standard, **kwargs):
+ def check(process_output, judge_output, _checker=standard, **kwargs):
- return _checker(judge_output, process_output)
+ return _checker(judge_output, process_output)
- del standard
+ del standard
|
Remove untested checker code path
|
## Code Before:
def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
## Instruction:
Remove untested checker code path
## Code After:
from ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
|
+ from ._checker import standard
- def check(process_output, judge_output, **kwargs):
- from six.moves import zip
- process_lines = list(filter(None, process_output.split(b'\n')))
- judge_lines = list(filter(None, judge_output.split(b'\n')))
- if len(process_lines) != len(judge_lines):
- return False
- for process_line, judge_line in zip(process_lines, judge_lines):
- if process_line.split() != judge_line.split():
- return False
- return True
- try:
- from ._checker import standard
- except ImportError as e:
- pass
- else:
- def check(process_output, judge_output, _checker=standard, **kwargs):
? ----
+ def check(process_output, judge_output, _checker=standard, **kwargs):
- return _checker(judge_output, process_output)
? ----
+ return _checker(judge_output, process_output)
- del standard
? ----
+ del standard
|
6611641fec2342fa8dcfdbf12d74558df65ed2eb
|
isserviceup/services/heroku.py
|
isserviceup/services/heroku.py
|
import requests
from isserviceup.services.models.service import Service, Status
class Heroku(Service):
name = 'Heroku'
status_url = 'https://status.heroku.com/'
icon_url = '/images/icons/heroku.png'
def get_status(self):
r = requests.get('https://status.heroku.com/api/v3/current-status')
res = r.json()
status = res['status']['Production']
if status == 'green':
return Status.ok
elif status == 'yellow':
return Status.minor
elif status == 'orange':
return Status.major
elif status == 'red':
return Status.critical
|
import requests
from isserviceup.services.models.service import Service, Status
class Heroku(Service):
name = 'Heroku'
status_url = 'https://status.heroku.com/'
icon_url = '/images/icons/heroku.png'
def get_status(self):
r = requests.get('https://status.heroku.com/api/v3/current-status')
res = r.json()
status = res['status']['Production']
if status == 'green':
return Status.ok
elif status == 'yellow':
return Status.minor
elif status == 'orange':
return Status.major
elif status == 'red':
return Status.critical
else:
raise Exception('unexpected status')
|
Raise exception for unexpected status
|
Raise exception for unexpected status
|
Python
|
apache-2.0
|
marcopaz/is-service-up,marcopaz/is-service-up,marcopaz/is-service-up
|
import requests
from isserviceup.services.models.service import Service, Status
class Heroku(Service):
name = 'Heroku'
status_url = 'https://status.heroku.com/'
icon_url = '/images/icons/heroku.png'
def get_status(self):
r = requests.get('https://status.heroku.com/api/v3/current-status')
res = r.json()
status = res['status']['Production']
if status == 'green':
return Status.ok
elif status == 'yellow':
return Status.minor
elif status == 'orange':
return Status.major
elif status == 'red':
return Status.critical
+ else:
+ raise Exception('unexpected status')
+
|
Raise exception for unexpected status
|
## Code Before:
import requests
from isserviceup.services.models.service import Service, Status
class Heroku(Service):
name = 'Heroku'
status_url = 'https://status.heroku.com/'
icon_url = '/images/icons/heroku.png'
def get_status(self):
r = requests.get('https://status.heroku.com/api/v3/current-status')
res = r.json()
status = res['status']['Production']
if status == 'green':
return Status.ok
elif status == 'yellow':
return Status.minor
elif status == 'orange':
return Status.major
elif status == 'red':
return Status.critical
## Instruction:
Raise exception for unexpected status
## Code After:
import requests
from isserviceup.services.models.service import Service, Status
class Heroku(Service):
name = 'Heroku'
status_url = 'https://status.heroku.com/'
icon_url = '/images/icons/heroku.png'
def get_status(self):
r = requests.get('https://status.heroku.com/api/v3/current-status')
res = r.json()
status = res['status']['Production']
if status == 'green':
return Status.ok
elif status == 'yellow':
return Status.minor
elif status == 'orange':
return Status.major
elif status == 'red':
return Status.critical
else:
raise Exception('unexpected status')
|
import requests
from isserviceup.services.models.service import Service, Status
class Heroku(Service):
name = 'Heroku'
status_url = 'https://status.heroku.com/'
icon_url = '/images/icons/heroku.png'
def get_status(self):
r = requests.get('https://status.heroku.com/api/v3/current-status')
res = r.json()
status = res['status']['Production']
if status == 'green':
return Status.ok
elif status == 'yellow':
return Status.minor
elif status == 'orange':
return Status.major
elif status == 'red':
return Status.critical
+ else:
+ raise Exception('unexpected status')
+
|
f828ac9ee5082a9a0b5e215c4c814e7f35db11b6
|
planetstack/core/models/__init__.py
|
planetstack/core/models/__init__.py
|
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
|
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
|
Add credentials module to core list
|
Add credentials module to core list
|
Python
|
apache-2.0
|
wathsalav/xos,wathsalav/xos,wathsalav/xos,wathsalav/xos
|
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
+ from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
|
Add credentials module to core list
|
## Code Before:
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
## Instruction:
Add credentials module to core list
## Code After:
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
|
from .plcorebase import PlCoreBase
from .planetstack import PlanetStack
from .project import Project
from .singletonmodel import SingletonModel
from .service import Service
from .service import ServiceAttribute
from .tag import Tag
from .role import Role
from .site import Site,Deployment, DeploymentRole, DeploymentPrivilege, SiteDeployments
from .dashboard import DashboardView
from .user import User, UserDashboardView
from .serviceclass import ServiceClass
from .slice import Slice, SliceDeployments
from .site import SitePrivilege, SiteDeployments
from .userdeployments import UserDeployments
from .image import Image, ImageDeployments
from .node import Node
from .serviceresource import ServiceResource
from .slice import SliceRole
from .slice import SlicePrivilege
+ from .credential import UserCredential,SiteCredential,SliceCredential
from .site import SiteRole
from .site import SitePrivilege
from .planetstack import PlanetStackRole
from .planetstack import PlanetStackPrivilege
from .slicetag import SliceTag
from .flavor import Flavor
from .sliver import Sliver
from .reservation import ReservedResource
from .reservation import Reservation
from .network import Network, NetworkParameterType, NetworkParameter, NetworkSliver, NetworkTemplate, Router, NetworkSlice, NetworkDeployments
from .billing import Account, Invoice, Charge, UsableObject, Payment
|
519d1f23682b6815c41c2ac34df775ea2e333eab
|
jasmin_notifications/views.py
|
jasmin_notifications/views.py
|
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.views.decorators.http import require_safe
from django import http
from django.shortcuts import redirect
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from .models import Notification, UserNotification
def _handle_notification(request, notification):
if not notification.followed_at:
notification.followed_at = timezone.now()
notification.save()
return redirect(notification.link)
@login_required
def _handle_user_notification(request, notification):
# For user notifications, the user must match the logged in user
if request.user != notification.user:
raise http.Http404("Notification does not exist")
return _handle_notification(request, notification)
@require_safe
def follow(request, uuid):
"""
Handler for ``/<uuid>/``.
Responds to GET requests only.
Marks the specified notification as read before redirecting to the link.
"""
# First, try to find a notification with the UUID
notification = Notification.objects.filter(uuid = uuid).first()
if not notification:
raise http.Http404("Notification does not exist")
# If we have a user notification, the user must match the logged in user
if isinstance(notification, UserNotification):
return _handle_user_notification(request, notification)
else:
return _handle_notification(request, notification)
|
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.views.decorators.http import require_safe
from django import http
from django.shortcuts import redirect
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from .models import Notification, UserNotification
@require_safe
def follow(request, uuid):
"""
Handler for ``/<uuid>/``.
Responds to GET requests only.
Marks all the notifications as read that have the same user and link before
redirecting to the link.
"""
# First, try to find a notification with the UUID
notification = Notification.objects.filter(uuid = uuid).first()
if not notification:
raise http.Http404("Notification does not exist")
if isinstance(notification, UserNotification):
# For user notifications, the user must match the logged in user
if request.user != notification.user:
raise http.Http404("Notification does not exist")
# Update the followed_at time for all the notifications for the same user
# and link
UserNotification.objects.filter(link = notification.link,
user = notification.user,
followed_at__isnull = True) \
.update(followed_at = timezone.now())
else:
# For email notifications, just update this notification
if not notification.followed_at:
notification.followed_at = timezone.now()
notification.save()
return redirect(notification.link)
|
Mark all similar notifications as followed when following one
|
Mark all similar notifications as followed when following one
|
Python
|
mit
|
cedadev/jasmin-notifications,cedadev/jasmin-notifications
|
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.views.decorators.http import require_safe
from django import http
from django.shortcuts import redirect
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from .models import Notification, UserNotification
- def _handle_notification(request, notification):
- if not notification.followed_at:
- notification.followed_at = timezone.now()
- notification.save()
- return redirect(notification.link)
-
- @login_required
- def _handle_user_notification(request, notification):
- # For user notifications, the user must match the logged in user
- if request.user != notification.user:
- raise http.Http404("Notification does not exist")
- return _handle_notification(request, notification)
-
@require_safe
def follow(request, uuid):
"""
Handler for ``/<uuid>/``.
Responds to GET requests only.
- Marks the specified notification as read before redirecting to the link.
+ Marks all the notifications as read that have the same user and link before
+ redirecting to the link.
"""
# First, try to find a notification with the UUID
notification = Notification.objects.filter(uuid = uuid).first()
if not notification:
raise http.Http404("Notification does not exist")
- # If we have a user notification, the user must match the logged in user
if isinstance(notification, UserNotification):
- return _handle_user_notification(request, notification)
+ # For user notifications, the user must match the logged in user
+ if request.user != notification.user:
+ raise http.Http404("Notification does not exist")
+ # Update the followed_at time for all the notifications for the same user
+ # and link
+ UserNotification.objects.filter(link = notification.link,
+ user = notification.user,
+ followed_at__isnull = True) \
+ .update(followed_at = timezone.now())
else:
- return _handle_notification(request, notification)
+ # For email notifications, just update this notification
+ if not notification.followed_at:
+ notification.followed_at = timezone.now()
+ notification.save()
+ return redirect(notification.link)
|
Mark all similar notifications as followed when following one
|
## Code Before:
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.views.decorators.http import require_safe
from django import http
from django.shortcuts import redirect
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from .models import Notification, UserNotification
def _handle_notification(request, notification):
if not notification.followed_at:
notification.followed_at = timezone.now()
notification.save()
return redirect(notification.link)
@login_required
def _handle_user_notification(request, notification):
# For user notifications, the user must match the logged in user
if request.user != notification.user:
raise http.Http404("Notification does not exist")
return _handle_notification(request, notification)
@require_safe
def follow(request, uuid):
"""
Handler for ``/<uuid>/``.
Responds to GET requests only.
Marks the specified notification as read before redirecting to the link.
"""
# First, try to find a notification with the UUID
notification = Notification.objects.filter(uuid = uuid).first()
if not notification:
raise http.Http404("Notification does not exist")
# If we have a user notification, the user must match the logged in user
if isinstance(notification, UserNotification):
return _handle_user_notification(request, notification)
else:
return _handle_notification(request, notification)
## Instruction:
Mark all similar notifications as followed when following one
## Code After:
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.views.decorators.http import require_safe
from django import http
from django.shortcuts import redirect
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from .models import Notification, UserNotification
@require_safe
def follow(request, uuid):
"""
Handler for ``/<uuid>/``.
Responds to GET requests only.
Marks all the notifications as read that have the same user and link before
redirecting to the link.
"""
# First, try to find a notification with the UUID
notification = Notification.objects.filter(uuid = uuid).first()
if not notification:
raise http.Http404("Notification does not exist")
if isinstance(notification, UserNotification):
# For user notifications, the user must match the logged in user
if request.user != notification.user:
raise http.Http404("Notification does not exist")
# Update the followed_at time for all the notifications for the same user
# and link
UserNotification.objects.filter(link = notification.link,
user = notification.user,
followed_at__isnull = True) \
.update(followed_at = timezone.now())
else:
# For email notifications, just update this notification
if not notification.followed_at:
notification.followed_at = timezone.now()
notification.save()
return redirect(notification.link)
|
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.views.decorators.http import require_safe
from django import http
from django.shortcuts import redirect
from django.utils import timezone
from django.contrib.auth.decorators import login_required
from .models import Notification, UserNotification
- def _handle_notification(request, notification):
- if not notification.followed_at:
- notification.followed_at = timezone.now()
- notification.save()
- return redirect(notification.link)
-
- @login_required
- def _handle_user_notification(request, notification):
- # For user notifications, the user must match the logged in user
- if request.user != notification.user:
- raise http.Http404("Notification does not exist")
- return _handle_notification(request, notification)
-
@require_safe
def follow(request, uuid):
"""
Handler for ``/<uuid>/``.
Responds to GET requests only.
- Marks the specified notification as read before redirecting to the link.
+ Marks all the notifications as read that have the same user and link before
+ redirecting to the link.
"""
# First, try to find a notification with the UUID
notification = Notification.objects.filter(uuid = uuid).first()
if not notification:
raise http.Http404("Notification does not exist")
- # If we have a user notification, the user must match the logged in user
if isinstance(notification, UserNotification):
- return _handle_user_notification(request, notification)
+ # For user notifications, the user must match the logged in user
+ if request.user != notification.user:
+ raise http.Http404("Notification does not exist")
+ # Update the followed_at time for all the notifications for the same user
+ # and link
+ UserNotification.objects.filter(link = notification.link,
+ user = notification.user,
+ followed_at__isnull = True) \
+ .update(followed_at = timezone.now())
else:
- return _handle_notification(request, notification)
+ # For email notifications, just update this notification
+ if not notification.followed_at:
+ notification.followed_at = timezone.now()
+ notification.save()
+ return redirect(notification.link)
|
8cceb96ae2d8352107dc2e03b336e84e9f2bdfb3
|
partner_feeds/templatetags/partner_feed_tags.py
|
partner_feeds/templatetags/partner_feed_tags.py
|
from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners
|
from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners
|
Make Django template tag forgiving of nonexistent partners.
|
Make Django template tag forgiving of nonexistent partners.
|
Python
|
bsd-2-clause
|
theatlantic/django-partner-feeds
|
from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
+ try:
- partner = Partner.objects.get(name=name)
+ partner = Partner.objects.get(name=name)
+ except Partner.DoesNotExist:
+ continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners
|
Make Django template tag forgiving of nonexistent partners.
|
## Code Before:
from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
partner = Partner.objects.get(name=name)
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners
## Instruction:
Make Django template tag forgiving of nonexistent partners.
## Code After:
from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
try:
partner = Partner.objects.get(name=name)
except Partner.DoesNotExist:
continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners
|
from django import template
from partner_feeds.models import Partner
register = template.Library()
@register.assignment_tag
def get_partners(*args):
partners = []
for name in args:
+ try:
- partner = Partner.objects.get(name=name)
+ partner = Partner.objects.get(name=name)
? ++++
+ except Partner.DoesNotExist:
+ continue
partner.posts = partner.post_set.all().order_by('-date')
partners.append(partner)
return partners
|
e17d8f9b8bd09b1b96cad3e61961f3833d2e486c
|
dataverse/file.py
|
dataverse/file.py
|
from __future__ import absolute_import
from dataverse.utils import sanitize
class DataverseFile(object):
def __init__(self, dataset, name, file_id=None):
self.dataset = dataset
self.name = sanitize(name)
self.id = file_id
self.download_url = '{0}/access/datafile/{1}'.format(
dataset.connection.native_base_url, self.id
)
edit_media_base = '{0}/edit-media/file/{1}'
self.edit_media_uri = edit_media_base.format(
dataset.connection.sword_base_url, self.id
)
@classmethod
def from_json(cls, dataset, json):
name = json['datafile']['name']
file_id = json['datafile']['id']
return cls(dataset, name, file_id)
|
from __future__ import absolute_import
from dataverse.utils import sanitize
class DataverseFile(object):
def __init__(self, dataset, name, file_id=None):
self.dataset = dataset
self.name = sanitize(name)
self.id = file_id
self.download_url = '{0}/access/datafile/{1}'.format(
dataset.connection.native_base_url, self.id
)
edit_media_base = '{0}/edit-media/file/{1}'
self.edit_media_uri = edit_media_base.format(
dataset.connection.sword_base_url, self.id
)
@classmethod
def from_json(cls, dataset, json):
try:
name = json['dataFile']['filename']
file_id = json['dataFile']['id']
except KeyError:
name = json['datafile']['name']
file_id = json['datafile']['id']
return cls(dataset, name, file_id)
|
Fix 'class DataverseFile' to handle old and new response format Tests were failing after swith to new server/version
|
Fix 'class DataverseFile' to handle old and new response format
Tests were failing after swith to new server/version
|
Python
|
apache-2.0
|
CenterForOpenScience/dataverse-client-python,IQSS/dataverse-client-python
|
from __future__ import absolute_import
from dataverse.utils import sanitize
class DataverseFile(object):
def __init__(self, dataset, name, file_id=None):
self.dataset = dataset
self.name = sanitize(name)
self.id = file_id
self.download_url = '{0}/access/datafile/{1}'.format(
dataset.connection.native_base_url, self.id
)
edit_media_base = '{0}/edit-media/file/{1}'
self.edit_media_uri = edit_media_base.format(
dataset.connection.sword_base_url, self.id
)
@classmethod
def from_json(cls, dataset, json):
+ try:
+ name = json['dataFile']['filename']
+ file_id = json['dataFile']['id']
+ except KeyError:
- name = json['datafile']['name']
+ name = json['datafile']['name']
- file_id = json['datafile']['id']
+ file_id = json['datafile']['id']
return cls(dataset, name, file_id)
|
Fix 'class DataverseFile' to handle old and new response format Tests were failing after swith to new server/version
|
## Code Before:
from __future__ import absolute_import
from dataverse.utils import sanitize
class DataverseFile(object):
def __init__(self, dataset, name, file_id=None):
self.dataset = dataset
self.name = sanitize(name)
self.id = file_id
self.download_url = '{0}/access/datafile/{1}'.format(
dataset.connection.native_base_url, self.id
)
edit_media_base = '{0}/edit-media/file/{1}'
self.edit_media_uri = edit_media_base.format(
dataset.connection.sword_base_url, self.id
)
@classmethod
def from_json(cls, dataset, json):
name = json['datafile']['name']
file_id = json['datafile']['id']
return cls(dataset, name, file_id)
## Instruction:
Fix 'class DataverseFile' to handle old and new response format Tests were failing after swith to new server/version
## Code After:
from __future__ import absolute_import
from dataverse.utils import sanitize
class DataverseFile(object):
def __init__(self, dataset, name, file_id=None):
self.dataset = dataset
self.name = sanitize(name)
self.id = file_id
self.download_url = '{0}/access/datafile/{1}'.format(
dataset.connection.native_base_url, self.id
)
edit_media_base = '{0}/edit-media/file/{1}'
self.edit_media_uri = edit_media_base.format(
dataset.connection.sword_base_url, self.id
)
@classmethod
def from_json(cls, dataset, json):
try:
name = json['dataFile']['filename']
file_id = json['dataFile']['id']
except KeyError:
name = json['datafile']['name']
file_id = json['datafile']['id']
return cls(dataset, name, file_id)
|
from __future__ import absolute_import
from dataverse.utils import sanitize
class DataverseFile(object):
def __init__(self, dataset, name, file_id=None):
self.dataset = dataset
self.name = sanitize(name)
self.id = file_id
self.download_url = '{0}/access/datafile/{1}'.format(
dataset.connection.native_base_url, self.id
)
edit_media_base = '{0}/edit-media/file/{1}'
self.edit_media_uri = edit_media_base.format(
dataset.connection.sword_base_url, self.id
)
@classmethod
def from_json(cls, dataset, json):
+ try:
+ name = json['dataFile']['filename']
+ file_id = json['dataFile']['id']
+ except KeyError:
- name = json['datafile']['name']
+ name = json['datafile']['name']
? ++++
- file_id = json['datafile']['id']
+ file_id = json['datafile']['id']
? ++++
return cls(dataset, name, file_id)
|
d04a0000d231b1a597992bd28ab4ab8de27667e2
|
cron/updateGameCache.py
|
cron/updateGameCache.py
|
import urllib2
urllib2.urlopen('http://www.gamingwithlemons.com/cron/update')
|
import urllib.request
urllib.request.urlopen('http://www.gamingwithlemons.com/cron/update')
|
Update cron job to use python3
|
Update cron job to use python3
|
Python
|
mit
|
rewphus/tidbitsdev,Clidus/gwl,rewphus/tidbitsdev,Clidus/gwl,rewphus/tidbitsdev,rewphus/tidbitsdev,Clidus/gwl,Clidus/gwl
|
- import urllib2
+ import urllib.request
- urllib2.urlopen('http://www.gamingwithlemons.com/cron/update')
+ urllib.request.urlopen('http://www.gamingwithlemons.com/cron/update')
|
Update cron job to use python3
|
## Code Before:
import urllib2
urllib2.urlopen('http://www.gamingwithlemons.com/cron/update')
## Instruction:
Update cron job to use python3
## Code After:
import urllib.request
urllib.request.urlopen('http://www.gamingwithlemons.com/cron/update')
|
- import urllib2
+ import urllib.request
- urllib2.urlopen('http://www.gamingwithlemons.com/cron/update')
? ^
+ urllib.request.urlopen('http://www.gamingwithlemons.com/cron/update')
? ^^^^^^^^
|
07f409bb6b8d008cf473aeb33fd0833dccfba402
|
mm1_main.py
|
mm1_main.py
|
import mm1
import sim
### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1
### Initialize
# Create new simulation engine
se = sim.SimulationEngine()
# Seed default PRNG
se.prng.seed = 100
# Create MM1 specific event handler
event_handler = mm1.MM1EventHandler()
event_handler.interarrival_rate = interarrival_rate
event_handler.service_rate = service_rate
### Simulate
# Schedule finishing event; simulate for 24h
se.stop(60*60*24)
# Start simulating
se.start()
|
import argparse
import mm1
import sim
import time
### Parse command line arguments
parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Main script")
parser.add_argument('sim_duration', metavar='simulation_duration',
type=int, help='simulation duration in seconds')
parser.add_argument('--seed', dest='seed', default=int(round(time.time())),
type=int, help='seed for the PRNG (default: current system timestamp)')
args = parser.parse_args()
sim_duration = args.sim_duration
seed = args.seed
### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1
### Initialize
# Create new simulation engine
se = sim.SimulationEngine()
# Seed default PRNG
se.prng.seed = seed
# Create MM1 specific event handler
event_handler = mm1.MM1EventHandler()
event_handler.interarrival_rate = interarrival_rate
event_handler.service_rate = service_rate
### Simulate
# Schedule finishing event
se.stop(sim_duration)
# Start simulating
se.start()
|
Add command line arguments to main script.
|
Add command line arguments to main script.
|
Python
|
mit
|
kubkon/des-in-python
|
+ import argparse
import mm1
import sim
+ import time
+
+ ### Parse command line arguments
+ parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Main script")
+ parser.add_argument('sim_duration', metavar='simulation_duration',
+ type=int, help='simulation duration in seconds')
+ parser.add_argument('--seed', dest='seed', default=int(round(time.time())),
+ type=int, help='seed for the PRNG (default: current system timestamp)')
+ args = parser.parse_args()
+ sim_duration = args.sim_duration
+ seed = args.seed
### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1
### Initialize
# Create new simulation engine
se = sim.SimulationEngine()
# Seed default PRNG
- se.prng.seed = 100
+ se.prng.seed = seed
# Create MM1 specific event handler
event_handler = mm1.MM1EventHandler()
event_handler.interarrival_rate = interarrival_rate
event_handler.service_rate = service_rate
### Simulate
- # Schedule finishing event; simulate for 24h
- se.stop(60*60*24)
+ # Schedule finishing event
+ se.stop(sim_duration)
# Start simulating
se.start()
|
Add command line arguments to main script.
|
## Code Before:
import mm1
import sim
### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1
### Initialize
# Create new simulation engine
se = sim.SimulationEngine()
# Seed default PRNG
se.prng.seed = 100
# Create MM1 specific event handler
event_handler = mm1.MM1EventHandler()
event_handler.interarrival_rate = interarrival_rate
event_handler.service_rate = service_rate
### Simulate
# Schedule finishing event; simulate for 24h
se.stop(60*60*24)
# Start simulating
se.start()
## Instruction:
Add command line arguments to main script.
## Code After:
import argparse
import mm1
import sim
import time
### Parse command line arguments
parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Main script")
parser.add_argument('sim_duration', metavar='simulation_duration',
type=int, help='simulation duration in seconds')
parser.add_argument('--seed', dest='seed', default=int(round(time.time())),
type=int, help='seed for the PRNG (default: current system timestamp)')
args = parser.parse_args()
sim_duration = args.sim_duration
seed = args.seed
### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1
### Initialize
# Create new simulation engine
se = sim.SimulationEngine()
# Seed default PRNG
se.prng.seed = seed
# Create MM1 specific event handler
event_handler = mm1.MM1EventHandler()
event_handler.interarrival_rate = interarrival_rate
event_handler.service_rate = service_rate
### Simulate
# Schedule finishing event
se.stop(sim_duration)
# Start simulating
se.start()
|
+ import argparse
import mm1
import sim
+ import time
+
+ ### Parse command line arguments
+ parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Main script")
+ parser.add_argument('sim_duration', metavar='simulation_duration',
+ type=int, help='simulation duration in seconds')
+ parser.add_argument('--seed', dest='seed', default=int(round(time.time())),
+ type=int, help='seed for the PRNG (default: current system timestamp)')
+ args = parser.parse_args()
+ sim_duration = args.sim_duration
+ seed = args.seed
### Params
# Mean interarrival rate of customers per second;
# hence, 0.05 <=> 3 people/minute
interarrival_rate = 0.05
# Mean service rate by the teller per second;
# hence, 0.1 <=> 6 people/minute
service_rate = 0.1
### Initialize
# Create new simulation engine
se = sim.SimulationEngine()
# Seed default PRNG
- se.prng.seed = 100
? ^^^
+ se.prng.seed = seed
? ^^^^
# Create MM1 specific event handler
event_handler = mm1.MM1EventHandler()
event_handler.interarrival_rate = interarrival_rate
event_handler.service_rate = service_rate
### Simulate
- # Schedule finishing event; simulate for 24h
- se.stop(60*60*24)
+ # Schedule finishing event
+ se.stop(sim_duration)
# Start simulating
se.start()
|
15a5e861e63fa5b2662968ce4296c75ecfadee50
|
iscc_bench/readers/__init__.py
|
iscc_bench/readers/__init__.py
|
from iscc_bench.readers.bxbooks import bxbooks
ALL_READERS = (bxbooks,)
|
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.harvard import harvard
ALL_READERS = (bxbooks, harvard)
|
Add harvard reader to ALL_READERS
|
Add harvard reader to ALL_READERS
|
Python
|
bsd-2-clause
|
coblo/isccbench
|
from iscc_bench.readers.bxbooks import bxbooks
+ from iscc_bench.readers.harvard import harvard
- ALL_READERS = (bxbooks,)
+ ALL_READERS = (bxbooks, harvard)
|
Add harvard reader to ALL_READERS
|
## Code Before:
from iscc_bench.readers.bxbooks import bxbooks
ALL_READERS = (bxbooks,)
## Instruction:
Add harvard reader to ALL_READERS
## Code After:
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.harvard import harvard
ALL_READERS = (bxbooks, harvard)
|
from iscc_bench.readers.bxbooks import bxbooks
+ from iscc_bench.readers.harvard import harvard
- ALL_READERS = (bxbooks,)
+ ALL_READERS = (bxbooks, harvard)
? ++++++++
|
8653f2c0e63fecd5617dfa063878c846ddafcf97
|
tests/test_add_language/test_update_language_list.py
|
tests/test_add_language/test_update_language_list.py
|
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
kln_language_id = 'kln'
kln_language_name = 'Klingon'
add_lang.update_language_list(kln_language_id, kln_language_name)
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
kln_lang = None
for lang in langs:
if lang['id'] == kln_language_id:
kln_lang = lang
nose.assert_is_not_none(kln_lang)
nose.assert_equal(kln_lang['name'], kln_language_name)
|
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
new_language_id = 'kln'
new_language_name = 'Klingon'
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
orig_num_langs = len(langs)
add_lang.update_language_list(new_language_id, new_language_name)
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
num_langs = len(langs)
nose.assert_equal(num_langs, orig_num_langs + 1)
new_lang = None
for lang in langs:
if lang['id'] == new_language_id:
new_lang = lang
nose.assert_is_not_none(new_lang)
nose.assert_equal(new_lang['name'], new_language_name)
|
Add additional checks to update_language_list test
|
Add additional checks to update_language_list test
Also make language variable names independent of their actual values.
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
- kln_language_id = 'kln'
+ new_language_id = 'kln'
- kln_language_name = 'Klingon'
+ new_language_name = 'Klingon'
- add_lang.update_language_list(kln_language_id, kln_language_name)
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
+ orig_num_langs = len(langs)
+ add_lang.update_language_list(new_language_id, new_language_name)
+ with open(langs_path, 'r') as langs_file:
+ langs = json.load(langs_file)
+ num_langs = len(langs)
+ nose.assert_equal(num_langs, orig_num_langs + 1)
- kln_lang = None
+ new_lang = None
for lang in langs:
- if lang['id'] == kln_language_id:
+ if lang['id'] == new_language_id:
- kln_lang = lang
+ new_lang = lang
- nose.assert_is_not_none(kln_lang)
+ nose.assert_is_not_none(new_lang)
- nose.assert_equal(kln_lang['name'], kln_language_name)
+ nose.assert_equal(new_lang['name'], new_language_name)
|
Add additional checks to update_language_list test
|
## Code Before:
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
kln_language_id = 'kln'
kln_language_name = 'Klingon'
add_lang.update_language_list(kln_language_id, kln_language_name)
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
kln_lang = None
for lang in langs:
if lang['id'] == kln_language_id:
kln_lang = lang
nose.assert_is_not_none(kln_lang)
nose.assert_equal(kln_lang['name'], kln_language_name)
## Instruction:
Add additional checks to update_language_list test
## Code After:
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
new_language_id = 'kln'
new_language_name = 'Klingon'
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
orig_num_langs = len(langs)
add_lang.update_language_list(new_language_id, new_language_name)
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
num_langs = len(langs)
nose.assert_equal(num_langs, orig_num_langs + 1)
new_lang = None
for lang in langs:
if lang['id'] == new_language_id:
new_lang = lang
nose.assert_is_not_none(new_lang)
nose.assert_equal(new_lang['name'], new_language_name)
|
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
@nose.with_setup(set_up, tear_down)
def test_update_languge_list_add():
"""should add new languages to language list"""
- kln_language_id = 'kln'
? --
+ new_language_id = 'kln'
? ++
- kln_language_name = 'Klingon'
? --
+ new_language_name = 'Klingon'
? ++
- add_lang.update_language_list(kln_language_id, kln_language_name)
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
+ orig_num_langs = len(langs)
+ add_lang.update_language_list(new_language_id, new_language_name)
+ with open(langs_path, 'r') as langs_file:
+ langs = json.load(langs_file)
+ num_langs = len(langs)
+ nose.assert_equal(num_langs, orig_num_langs + 1)
- kln_lang = None
? --
+ new_lang = None
? ++
for lang in langs:
- if lang['id'] == kln_language_id:
? --
+ if lang['id'] == new_language_id:
? ++
- kln_lang = lang
? --
+ new_lang = lang
? ++
- nose.assert_is_not_none(kln_lang)
? --
+ nose.assert_is_not_none(new_lang)
? ++
- nose.assert_equal(kln_lang['name'], kln_language_name)
? -- --
+ nose.assert_equal(new_lang['name'], new_language_name)
? ++ ++
|
2ac94aa922dbf2d07039bc6545e7b1d31c5c9e4e
|
src/cclib/progress/__init__.py
|
src/cclib/progress/__init__.py
|
__revision__ = "$Revision$"
from textprogress import TextProgress
try:
import qt
except ImportError:
pass # import QtProgress will cause an error
else:
from qtprogress import QtProgress
|
__revision__ = "$Revision$"
from textprogress import TextProgress
import sys
if 'qt' in sys.modules.keys():
from qtprogress import QtProgress
|
Check to see if qt is loaded; if so, export QtProgress class
|
Check to see if qt is loaded; if so, export QtProgress class
|
Python
|
lgpl-2.1
|
Clyde-fare/cclib,Schamnad/cclib,jchodera/cclib,ghutchis/cclib,gaursagar/cclib,berquist/cclib,andersx/cclib,ben-albrecht/cclib,Schamnad/cclib,cclib/cclib,ATenderholt/cclib,cclib/cclib,langner/cclib,berquist/cclib,Clyde-fare/cclib,langner/cclib,ben-albrecht/cclib,ATenderholt/cclib,ghutchis/cclib,jchodera/cclib,andersx/cclib,cclib/cclib,langner/cclib,berquist/cclib,gaursagar/cclib
|
__revision__ = "$Revision$"
from textprogress import TextProgress
+ import sys
+
+ if 'qt' in sys.modules.keys():
- try:
- import qt
- except ImportError:
- pass # import QtProgress will cause an error
- else:
from qtprogress import QtProgress
|
Check to see if qt is loaded; if so, export QtProgress class
|
## Code Before:
__revision__ = "$Revision$"
from textprogress import TextProgress
try:
import qt
except ImportError:
pass # import QtProgress will cause an error
else:
from qtprogress import QtProgress
## Instruction:
Check to see if qt is loaded; if so, export QtProgress class
## Code After:
__revision__ = "$Revision$"
from textprogress import TextProgress
import sys
if 'qt' in sys.modules.keys():
from qtprogress import QtProgress
|
__revision__ = "$Revision$"
from textprogress import TextProgress
+ import sys
+
+ if 'qt' in sys.modules.keys():
- try:
- import qt
- except ImportError:
- pass # import QtProgress will cause an error
- else:
from qtprogress import QtProgress
|
5c9bdb1260562f0623807ce9a5751d33c806374a
|
pyfr/nputil.py
|
pyfr/nputil.py
|
import numpy as np
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
import numpy as np
def npaligned(shape, dtype, alignb=32):
nbytes = np.prod(shape)*np.dtype(dtype).itemsize
buf = np.zeros(nbytes + alignb, dtype=np.uint8)
off = -buf.ctypes.data % alignb
return buf[off:nbytes + off].view(dtype).reshape(shape)
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
Add support for allocating aligned NumPy arrays.
|
Add support for allocating aligned NumPy arrays.
|
Python
|
bsd-3-clause
|
tjcorona/PyFR,tjcorona/PyFR,BrianVermeire/PyFR,Aerojspark/PyFR,iyer-arvind/PyFR,tjcorona/PyFR
|
import numpy as np
+
+
+ def npaligned(shape, dtype, alignb=32):
+ nbytes = np.prod(shape)*np.dtype(dtype).itemsize
+ buf = np.zeros(nbytes + alignb, dtype=np.uint8)
+ off = -buf.ctypes.data % alignb
+
+ return buf[off:nbytes + off].view(dtype).reshape(shape)
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
Add support for allocating aligned NumPy arrays.
|
## Code Before:
import numpy as np
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
## Instruction:
Add support for allocating aligned NumPy arrays.
## Code After:
import numpy as np
def npaligned(shape, dtype, alignb=32):
nbytes = np.prod(shape)*np.dtype(dtype).itemsize
buf = np.zeros(nbytes + alignb, dtype=np.uint8)
off = -buf.ctypes.data % alignb
return buf[off:nbytes + off].view(dtype).reshape(shape)
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
import numpy as np
+
+
+ def npaligned(shape, dtype, alignb=32):
+ nbytes = np.prod(shape)*np.dtype(dtype).itemsize
+ buf = np.zeros(nbytes + alignb, dtype=np.uint8)
+ off = -buf.ctypes.data % alignb
+
+ return buf[off:nbytes + off].view(dtype).reshape(shape)
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
b812a8da81ec9943d11b8cb9f709e234c90a2282
|
stylo/utils.py
|
stylo/utils.py
|
from uuid import uuid4
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
import inspect
from uuid import uuid4
def get_parameters(f):
return list(inspect.signature(f).parameters.keys())
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
Add the function back for now
|
Add the function back for now
|
Python
|
mit
|
alcarney/stylo,alcarney/stylo
|
+ import inspect
from uuid import uuid4
+
+
+ def get_parameters(f):
+ return list(inspect.signature(f).parameters.keys())
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
Add the function back for now
|
## Code Before:
from uuid import uuid4
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
## Instruction:
Add the function back for now
## Code After:
import inspect
from uuid import uuid4
def get_parameters(f):
return list(inspect.signature(f).parameters.keys())
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
+ import inspect
from uuid import uuid4
+
+
+ def get_parameters(f):
+ return list(inspect.signature(f).parameters.keys())
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
0b7a1904ef5511916fc4978c325862241a46aef3
|
lib/pyfrc/mains/cli_profiler.py
|
lib/pyfrc/mains/cli_profiler.py
|
import argparse
import inspect
import subprocess
import sys
class PyFrcProfiler:
"""
Wraps other commands by running them via the built in cProfile module.
Use this to profile your program and figure out where you're spending
a lot of time (note that cProfile only profiles the main thread)
"""
def __init__(self, parser):
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Arguments to pass to robot.py')
def run(self, options, robot_class, **static_options):
from .. import config
config.mode = 'profiler'
try:
import cProfile
except ImportError:
print("Error importing cProfile module for profiling, your python interpreter may not support profiling\n", file=sys.stderr)
return 1
if len(options.args) == 0:
print("ERROR: Profiler command requires arguments to run other commands")
return 1
file_location = inspect.getfile(robot_class)
# construct the arguments to run the profiler
args = [sys.executable, '-m', 'cProfile', '-s', 'tottime', file_location] + options.args
return subprocess.call(args)
|
import argparse
import inspect
from os.path import abspath
import subprocess
import sys
class PyFrcProfiler:
"""
Wraps other commands by running them via the built in cProfile module.
Use this to profile your program and figure out where you're spending
a lot of time (note that cProfile only profiles the main thread)
"""
def __init__(self, parser):
parser.add_argument('-o', '--outfile', default=None,
help="Save stats to <outfile>")
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Arguments to pass to robot.py')
def run(self, options, robot_class, **static_options):
from .. import config
config.mode = 'profiler'
try:
import cProfile
except ImportError:
print("Error importing cProfile module for profiling, your python interpreter may not support profiling\n", file=sys.stderr)
return 1
if len(options.args) == 0:
print("ERROR: Profiler command requires arguments to run other commands")
return 1
file_location = abspath(inspect.getfile(robot_class))
if options.outfile:
profile_args = ['-o', options.outfile]
else:
profile_args = ['-s', 'tottime']
# construct the arguments to run the profiler
args = [sys.executable, '-m', 'cProfile'] + profile_args + [file_location] + options.args
return subprocess.call(args)
|
Add output option for profiler
|
Add output option for profiler
|
Python
|
mit
|
robotpy/pyfrc
|
import argparse
import inspect
+ from os.path import abspath
import subprocess
import sys
class PyFrcProfiler:
"""
Wraps other commands by running them via the built in cProfile module.
Use this to profile your program and figure out where you're spending
a lot of time (note that cProfile only profiles the main thread)
"""
def __init__(self, parser):
+ parser.add_argument('-o', '--outfile', default=None,
+ help="Save stats to <outfile>")
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Arguments to pass to robot.py')
def run(self, options, robot_class, **static_options):
from .. import config
config.mode = 'profiler'
try:
import cProfile
except ImportError:
print("Error importing cProfile module for profiling, your python interpreter may not support profiling\n", file=sys.stderr)
return 1
if len(options.args) == 0:
print("ERROR: Profiler command requires arguments to run other commands")
return 1
- file_location = inspect.getfile(robot_class)
+ file_location = abspath(inspect.getfile(robot_class))
+ if options.outfile:
+ profile_args = ['-o', options.outfile]
+ else:
+ profile_args = ['-s', 'tottime']
+
# construct the arguments to run the profiler
- args = [sys.executable, '-m', 'cProfile', '-s', 'tottime', file_location] + options.args
+ args = [sys.executable, '-m', 'cProfile'] + profile_args + [file_location] + options.args
return subprocess.call(args)
|
Add output option for profiler
|
## Code Before:
import argparse
import inspect
import subprocess
import sys
class PyFrcProfiler:
"""
Wraps other commands by running them via the built in cProfile module.
Use this to profile your program and figure out where you're spending
a lot of time (note that cProfile only profiles the main thread)
"""
def __init__(self, parser):
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Arguments to pass to robot.py')
def run(self, options, robot_class, **static_options):
from .. import config
config.mode = 'profiler'
try:
import cProfile
except ImportError:
print("Error importing cProfile module for profiling, your python interpreter may not support profiling\n", file=sys.stderr)
return 1
if len(options.args) == 0:
print("ERROR: Profiler command requires arguments to run other commands")
return 1
file_location = inspect.getfile(robot_class)
# construct the arguments to run the profiler
args = [sys.executable, '-m', 'cProfile', '-s', 'tottime', file_location] + options.args
return subprocess.call(args)
## Instruction:
Add output option for profiler
## Code After:
import argparse
import inspect
from os.path import abspath
import subprocess
import sys
class PyFrcProfiler:
"""
Wraps other commands by running them via the built in cProfile module.
Use this to profile your program and figure out where you're spending
a lot of time (note that cProfile only profiles the main thread)
"""
def __init__(self, parser):
parser.add_argument('-o', '--outfile', default=None,
help="Save stats to <outfile>")
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Arguments to pass to robot.py')
def run(self, options, robot_class, **static_options):
from .. import config
config.mode = 'profiler'
try:
import cProfile
except ImportError:
print("Error importing cProfile module for profiling, your python interpreter may not support profiling\n", file=sys.stderr)
return 1
if len(options.args) == 0:
print("ERROR: Profiler command requires arguments to run other commands")
return 1
file_location = abspath(inspect.getfile(robot_class))
if options.outfile:
profile_args = ['-o', options.outfile]
else:
profile_args = ['-s', 'tottime']
# construct the arguments to run the profiler
args = [sys.executable, '-m', 'cProfile'] + profile_args + [file_location] + options.args
return subprocess.call(args)
|
import argparse
import inspect
+ from os.path import abspath
import subprocess
import sys
class PyFrcProfiler:
"""
Wraps other commands by running them via the built in cProfile module.
Use this to profile your program and figure out where you're spending
a lot of time (note that cProfile only profiles the main thread)
"""
def __init__(self, parser):
+ parser.add_argument('-o', '--outfile', default=None,
+ help="Save stats to <outfile>")
parser.add_argument('args', nargs=argparse.REMAINDER,
help='Arguments to pass to robot.py')
def run(self, options, robot_class, **static_options):
from .. import config
config.mode = 'profiler'
try:
import cProfile
except ImportError:
print("Error importing cProfile module for profiling, your python interpreter may not support profiling\n", file=sys.stderr)
return 1
if len(options.args) == 0:
print("ERROR: Profiler command requires arguments to run other commands")
return 1
- file_location = inspect.getfile(robot_class)
+ file_location = abspath(inspect.getfile(robot_class))
? ++++++++ +
+ if options.outfile:
+ profile_args = ['-o', options.outfile]
+ else:
+ profile_args = ['-s', 'tottime']
+
# construct the arguments to run the profiler
- args = [sys.executable, '-m', 'cProfile', '-s', 'tottime', file_location] + options.args
? ^ ^^ -- ^^^^^^^^^^
+ args = [sys.executable, '-m', 'cProfile'] + profile_args + [file_location] + options.args
? ^ ^^^^^^^^^^^^^ ^ +
return subprocess.call(args)
|
a037843f62a3d6b1124f8b62517463ef92cd793f
|
tvsort_sl/fcntl.py
|
tvsort_sl/fcntl.py
|
from __future__ import unicode_literals
def fcntl(fd, op, arg=0):
return 0
def ioctl(fd, op, arg=0, mutable_flag=True):
if mutable_flag:
return 0
else:
return ""
def flock(fd, op):
return
def lockf(fd, operation, length=0, start=0, whence=0):
return
|
from __future__ import unicode_literals
# Variables with simple values
FASYNC = 64
FD_CLOEXEC = 1
F_DUPFD = 0
F_FULLFSYNC = 51
F_GETFD = 1
F_GETFL = 3
F_GETLK = 7
F_GETOWN = 5
F_RDLCK = 1
F_SETFD = 2
F_SETFL = 4
F_SETLK = 8
F_SETLKW = 9
F_SETOWN = 6
F_UNLCK = 2
F_WRLCK = 3
LOCK_EX = 2
LOCK_NB = 4
LOCK_SH = 1
LOCK_UN = 8
def fcntl(fd, op, arg=0):
return 0
def ioctl(fd, op, arg=0, mutable_flag=True):
if mutable_flag:
return 0
else:
return ""
def flock(fd, op):
return
def lockf(fd, operation, length=0, start=0, whence=0):
return
|
Add missing variables to cntl
|
Add missing variables to cntl
|
Python
|
mit
|
shlomiLan/tvsort_sl
|
from __future__ import unicode_literals
+
+ # Variables with simple values
+
+ FASYNC = 64
+
+ FD_CLOEXEC = 1
+
+ F_DUPFD = 0
+ F_FULLFSYNC = 51
+ F_GETFD = 1
+ F_GETFL = 3
+ F_GETLK = 7
+ F_GETOWN = 5
+ F_RDLCK = 1
+ F_SETFD = 2
+ F_SETFL = 4
+ F_SETLK = 8
+ F_SETLKW = 9
+ F_SETOWN = 6
+ F_UNLCK = 2
+ F_WRLCK = 3
+
+ LOCK_EX = 2
+ LOCK_NB = 4
+ LOCK_SH = 1
+ LOCK_UN = 8
def fcntl(fd, op, arg=0):
return 0
def ioctl(fd, op, arg=0, mutable_flag=True):
if mutable_flag:
return 0
else:
return ""
def flock(fd, op):
return
def lockf(fd, operation, length=0, start=0, whence=0):
return
|
Add missing variables to cntl
|
## Code Before:
from __future__ import unicode_literals
def fcntl(fd, op, arg=0):
return 0
def ioctl(fd, op, arg=0, mutable_flag=True):
if mutable_flag:
return 0
else:
return ""
def flock(fd, op):
return
def lockf(fd, operation, length=0, start=0, whence=0):
return
## Instruction:
Add missing variables to cntl
## Code After:
from __future__ import unicode_literals
# Variables with simple values
FASYNC = 64
FD_CLOEXEC = 1
F_DUPFD = 0
F_FULLFSYNC = 51
F_GETFD = 1
F_GETFL = 3
F_GETLK = 7
F_GETOWN = 5
F_RDLCK = 1
F_SETFD = 2
F_SETFL = 4
F_SETLK = 8
F_SETLKW = 9
F_SETOWN = 6
F_UNLCK = 2
F_WRLCK = 3
LOCK_EX = 2
LOCK_NB = 4
LOCK_SH = 1
LOCK_UN = 8
def fcntl(fd, op, arg=0):
return 0
def ioctl(fd, op, arg=0, mutable_flag=True):
if mutable_flag:
return 0
else:
return ""
def flock(fd, op):
return
def lockf(fd, operation, length=0, start=0, whence=0):
return
|
from __future__ import unicode_literals
+
+ # Variables with simple values
+
+ FASYNC = 64
+
+ FD_CLOEXEC = 1
+
+ F_DUPFD = 0
+ F_FULLFSYNC = 51
+ F_GETFD = 1
+ F_GETFL = 3
+ F_GETLK = 7
+ F_GETOWN = 5
+ F_RDLCK = 1
+ F_SETFD = 2
+ F_SETFL = 4
+ F_SETLK = 8
+ F_SETLKW = 9
+ F_SETOWN = 6
+ F_UNLCK = 2
+ F_WRLCK = 3
+
+ LOCK_EX = 2
+ LOCK_NB = 4
+ LOCK_SH = 1
+ LOCK_UN = 8
def fcntl(fd, op, arg=0):
return 0
def ioctl(fd, op, arg=0, mutable_flag=True):
if mutable_flag:
return 0
else:
return ""
def flock(fd, op):
return
def lockf(fd, operation, length=0, start=0, whence=0):
return
|
e3e98b0533460837c4ea2eac67c4281eb0ba0012
|
test/requests/parametrized_test.py
|
test/requests/parametrized_test.py
|
import logging
import unittest
from wqflask import app
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = Elasticsearch([self.es_url])
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
|
import logging
import unittest
from wqflask import app
from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = get_elasticsearch_connection()
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
from time import sleep
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
sleep(1)
|
Use existing code. Delay after delete.
|
Use existing code. Delay after delete.
* Use existing code to get the elasticsearch connection. This should
prevent tests from failing in case the way connections to
elasticsearch are made change.
* Delay a while after deleting to allow elasticsearch to re-index the
data, thus preventing subtle bugs in the test.
|
Python
|
agpl-3.0
|
DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2
|
import logging
import unittest
from wqflask import app
+ from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
- self.es = Elasticsearch([self.es_url])
+ self.es = get_elasticsearch_connection()
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
+ from time import sleep
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
+ sleep(1)
|
Use existing code. Delay after delete.
|
## Code Before:
import logging
import unittest
from wqflask import app
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = Elasticsearch([self.es_url])
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
## Instruction:
Use existing code. Delay after delete.
## Code After:
import logging
import unittest
from wqflask import app
from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
self.es = get_elasticsearch_connection()
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
from time import sleep
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
sleep(1)
|
import logging
import unittest
from wqflask import app
+ from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
from elasticsearch import Elasticsearch, TransportError
class ParametrizedTest(unittest.TestCase):
def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
super(ParametrizedTest, self).__init__(methodName=methodName)
self.gn2_url = gn2_url
self.es_url = es_url
def setUp(self):
- self.es = Elasticsearch([self.es_url])
+ self.es = get_elasticsearch_connection()
self.es_cleanup = []
es_logger = logging.getLogger("elasticsearch")
es_logger.setLevel(app.config.get("LOG_LEVEL"))
es_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
es_trace_logger = logging.getLogger("elasticsearch.trace")
es_trace_logger.addHandler(
logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
def tearDown(self):
+ from time import sleep
self.es.delete_by_query(
index="users"
, doc_type="local"
, body={"query":{"match":{"email_address":"[email protected]"}}})
+ sleep(1)
|
886c2b92d8dcc40577341245f7973d4a2d31aa90
|
tests/core/test_mixer.py
|
tests/core/test_mixer.py
|
from __future__ import absolute_import, unicode_literals
import unittest
from mopidy import core
class CoreMixerTest(unittest.TestCase):
def setUp(self): # noqa: N802
self.core = core.Core(mixer=None, backends=[])
def test_volume(self):
self.assertEqual(self.core.mixer.get_volume(), None)
self.core.mixer.set_volume(30)
self.assertEqual(self.core.mixer.get_volume(), 30)
self.core.mixer.set_volume(70)
self.assertEqual(self.core.mixer.get_volume(), 70)
def test_mute(self):
self.assertEqual(self.core.mixer.get_mute(), False)
self.core.mixer.set_mute(True)
self.assertEqual(self.core.mixer.get_mute(), True)
|
from __future__ import absolute_import, unicode_literals
import unittest
import mock
from mopidy import core, mixer
class CoreMixerTest(unittest.TestCase):
def setUp(self): # noqa: N802
self.mixer = mock.Mock(spec=mixer.Mixer)
self.core = core.Core(mixer=self.mixer, backends=[])
def test_get_volume(self):
self.mixer.get_volume.return_value.get.return_value = 30
self.assertEqual(self.core.mixer.get_volume(), 30)
self.mixer.get_volume.assert_called_once_with()
def test_set_volume(self):
self.core.mixer.set_volume(30)
self.mixer.set_volume.assert_called_once_with(30)
def test_get_mute(self):
self.mixer.get_mute.return_value.get.return_value = True
self.assertEqual(self.core.mixer.get_mute(), True)
self.mixer.get_mute.assert_called_once_with()
def test_set_mute(self):
self.core.mixer.set_mute(True)
self.mixer.set_mute.assert_called_once_with(True)
|
Use a mixer mock in tests
|
core: Use a mixer mock in tests
|
Python
|
apache-2.0
|
ali/mopidy,tkem/mopidy,dbrgn/mopidy,ali/mopidy,bacontext/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,swak/mopidy,mopidy/mopidy,jodal/mopidy,ali/mopidy,swak/mopidy,dbrgn/mopidy,dbrgn/mopidy,ZenithDK/mopidy,rawdlite/mopidy,hkariti/mopidy,quartz55/mopidy,jodal/mopidy,jodal/mopidy,kingosticks/mopidy,vrs01/mopidy,rawdlite/mopidy,diandiankan/mopidy,hkariti/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,jcass77/mopidy,adamcik/mopidy,pacificIT/mopidy,jmarsik/mopidy,tkem/mopidy,jmarsik/mopidy,mopidy/mopidy,diandiankan/mopidy,tkem/mopidy,mopidy/mopidy,vrs01/mopidy,rawdlite/mopidy,vrs01/mopidy,ali/mopidy,pacificIT/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,jcass77/mopidy,bacontext/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,adamcik/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,bacontext/mopidy,bencevans/mopidy,vrs01/mopidy,rawdlite/mopidy,jcass77/mopidy,quartz55/mopidy,ZenithDK/mopidy,swak/mopidy,bencevans/mopidy,quartz55/mopidy,mokieyue/mopidy,kingosticks/mopidy,bencevans/mopidy,kingosticks/mopidy,diandiankan/mopidy,bencevans/mopidy,glogiotatidis/mopidy,tkem/mopidy,quartz55/mopidy,diandiankan/mopidy,mokieyue/mopidy,mokieyue/mopidy,jmarsik/mopidy,bacontext/mopidy,swak/mopidy,adamcik/mopidy,hkariti/mopidy,hkariti/mopidy,pacificIT/mopidy
|
from __future__ import absolute_import, unicode_literals
import unittest
+ import mock
+
- from mopidy import core
+ from mopidy import core, mixer
class CoreMixerTest(unittest.TestCase):
def setUp(self): # noqa: N802
+ self.mixer = mock.Mock(spec=mixer.Mixer)
- self.core = core.Core(mixer=None, backends=[])
+ self.core = core.Core(mixer=self.mixer, backends=[])
- def test_volume(self):
+ def test_get_volume(self):
- self.assertEqual(self.core.mixer.get_volume(), None)
+ self.mixer.get_volume.return_value.get.return_value = 30
+ self.assertEqual(self.core.mixer.get_volume(), 30)
+ self.mixer.get_volume.assert_called_once_with()
+
+ def test_set_volume(self):
self.core.mixer.set_volume(30)
- self.assertEqual(self.core.mixer.get_volume(), 30)
+ self.mixer.set_volume.assert_called_once_with(30)
- self.core.mixer.set_volume(70)
+ def test_get_mute(self):
+ self.mixer.get_mute.return_value.get.return_value = True
- self.assertEqual(self.core.mixer.get_volume(), 70)
+ self.assertEqual(self.core.mixer.get_mute(), True)
+ self.mixer.get_mute.assert_called_once_with()
- def test_mute(self):
+ def test_set_mute(self):
- self.assertEqual(self.core.mixer.get_mute(), False)
-
self.core.mixer.set_mute(True)
- self.assertEqual(self.core.mixer.get_mute(), True)
+ self.mixer.set_mute.assert_called_once_with(True)
|
Use a mixer mock in tests
|
## Code Before:
from __future__ import absolute_import, unicode_literals
import unittest
from mopidy import core
class CoreMixerTest(unittest.TestCase):
def setUp(self): # noqa: N802
self.core = core.Core(mixer=None, backends=[])
def test_volume(self):
self.assertEqual(self.core.mixer.get_volume(), None)
self.core.mixer.set_volume(30)
self.assertEqual(self.core.mixer.get_volume(), 30)
self.core.mixer.set_volume(70)
self.assertEqual(self.core.mixer.get_volume(), 70)
def test_mute(self):
self.assertEqual(self.core.mixer.get_mute(), False)
self.core.mixer.set_mute(True)
self.assertEqual(self.core.mixer.get_mute(), True)
## Instruction:
Use a mixer mock in tests
## Code After:
from __future__ import absolute_import, unicode_literals
import unittest
import mock
from mopidy import core, mixer
class CoreMixerTest(unittest.TestCase):
def setUp(self): # noqa: N802
self.mixer = mock.Mock(spec=mixer.Mixer)
self.core = core.Core(mixer=self.mixer, backends=[])
def test_get_volume(self):
self.mixer.get_volume.return_value.get.return_value = 30
self.assertEqual(self.core.mixer.get_volume(), 30)
self.mixer.get_volume.assert_called_once_with()
def test_set_volume(self):
self.core.mixer.set_volume(30)
self.mixer.set_volume.assert_called_once_with(30)
def test_get_mute(self):
self.mixer.get_mute.return_value.get.return_value = True
self.assertEqual(self.core.mixer.get_mute(), True)
self.mixer.get_mute.assert_called_once_with()
def test_set_mute(self):
self.core.mixer.set_mute(True)
self.mixer.set_mute.assert_called_once_with(True)
|
from __future__ import absolute_import, unicode_literals
import unittest
+ import mock
+
- from mopidy import core
+ from mopidy import core, mixer
? +++++++
class CoreMixerTest(unittest.TestCase):
def setUp(self): # noqa: N802
+ self.mixer = mock.Mock(spec=mixer.Mixer)
- self.core = core.Core(mixer=None, backends=[])
? ^^^
+ self.core = core.Core(mixer=self.mixer, backends=[])
? ^ ++++++++
- def test_volume(self):
+ def test_get_volume(self):
? ++++
- self.assertEqual(self.core.mixer.get_volume(), None)
+ self.mixer.get_volume.return_value.get.return_value = 30
+ self.assertEqual(self.core.mixer.get_volume(), 30)
+ self.mixer.get_volume.assert_called_once_with()
+
+ def test_set_volume(self):
self.core.mixer.set_volume(30)
- self.assertEqual(self.core.mixer.get_volume(), 30)
+ self.mixer.set_volume.assert_called_once_with(30)
- self.core.mixer.set_volume(70)
+ def test_get_mute(self):
+ self.mixer.get_mute.return_value.get.return_value = True
- self.assertEqual(self.core.mixer.get_volume(), 70)
? ^^^ ^ ^^
+ self.assertEqual(self.core.mixer.get_mute(), True)
? ^ ^ ^^^^
+ self.mixer.get_mute.assert_called_once_with()
- def test_mute(self):
+ def test_set_mute(self):
? ++++
- self.assertEqual(self.core.mixer.get_mute(), False)
-
self.core.mixer.set_mute(True)
- self.assertEqual(self.core.mixer.get_mute(), True)
+ self.mixer.set_mute.assert_called_once_with(True)
|
c09f346f7a2be5bdfd5dca8821ab260494a652af
|
routines/migrate-all.py
|
routines/migrate-all.py
|
from pmxbot import logging
from pmxbot import util
from pmxbot import rss
from pmxbot import storage
storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
|
import importlib
import pmxbot.storage
def run():
# load the storage classes so the migration routine will find them.
for mod in ('pmxbot.logging', 'pmxbot.karma', 'pmxbot.quotes',
'pmxbot.rss'):
importlib.import_module(mod)
pmxbot.storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
if __name__ == '__main__':
run()
|
Update migration script so it only runs if executed as a script. Also updated module references.
|
Update migration script so it only runs if executed as a script. Also updated module references.
|
Python
|
bsd-3-clause
|
jamwt/diesel-pmxbot,jamwt/diesel-pmxbot
|
+ import importlib
- from pmxbot import logging
- from pmxbot import util
- from pmxbot import rss
- from pmxbot import storage
- storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
+ import pmxbot.storage
+
+ def run():
+ # load the storage classes so the migration routine will find them.
+ for mod in ('pmxbot.logging', 'pmxbot.karma', 'pmxbot.quotes',
+ 'pmxbot.rss'):
+ importlib.import_module(mod)
+ pmxbot.storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
+
+ if __name__ == '__main__':
+ run()
+
|
Update migration script so it only runs if executed as a script. Also updated module references.
|
## Code Before:
from pmxbot import logging
from pmxbot import util
from pmxbot import rss
from pmxbot import storage
storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
## Instruction:
Update migration script so it only runs if executed as a script. Also updated module references.
## Code After:
import importlib
import pmxbot.storage
def run():
# load the storage classes so the migration routine will find them.
for mod in ('pmxbot.logging', 'pmxbot.karma', 'pmxbot.quotes',
'pmxbot.rss'):
importlib.import_module(mod)
pmxbot.storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
if __name__ == '__main__':
run()
|
- from pmxbot import logging
- from pmxbot import util
- from pmxbot import rss
- from pmxbot import storage
+ import importlib
+
+ import pmxbot.storage
+
+ def run():
+ # load the storage classes so the migration routine will find them.
+ for mod in ('pmxbot.logging', 'pmxbot.karma', 'pmxbot.quotes',
+ 'pmxbot.rss'):
+ importlib.import_module(mod)
- storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
+ pmxbot.storage.migrate_all('sqlite:pmxbot.sqlite', 'mongodb://localhost')
? ++++++++
+
+ if __name__ == '__main__':
+ run()
|
6d018ef0ac8bc020b38dab1dd29dd6e383be2e8e
|
src/sentry_heroku/plugin.py
|
src/sentry_heroku/plugin.py
|
import sentry_heroku
from sentry.plugins import ReleaseHook, ReleaseTrackingPlugin
class HerokuReleaseHook(ReleaseHook):
def handle(self, request):
self.finish_release(
version=request.POST['head_long'],
)
class HerokuPlugin(ReleaseTrackingPlugin):
author = 'Sentry Team'
author_url = 'https://github.com/getsentry'
resource_links = (
('Bug Tracker', 'https://github.com/getsentry/sentry-heroku/issues'),
('Source', 'https://github.com/getsentry/sentry-heroku'),
)
title = 'Heroku'
slug = 'heroku'
description = 'Integrate Heroku release tracking.'
version = sentry_heroku.VERSION
def get_release_doc_html(self, hook_url):
return """
<p>Add Sentry as a deploy hook to automatically track new releases.</p>
<pre class="clippy">heroku addons:create deployhooks:http --url={hook_url}</pre>
""".format(hook_url=hook_url)
def get_release_hook(self):
return HerokuReleaseHook
|
import sentry_heroku
from sentry.plugins import ReleaseHook, ReleaseTrackingPlugin
class HerokuReleaseHook(ReleaseHook):
def handle(self, request):
self.finish_release(
version=request.POST['head_long'],
url=request.POST['url'],
environment=request.POST['app'],
)
class HerokuPlugin(ReleaseTrackingPlugin):
author = 'Sentry Team'
author_url = 'https://github.com/getsentry'
resource_links = (
('Bug Tracker', 'https://github.com/getsentry/sentry-heroku/issues'),
('Source', 'https://github.com/getsentry/sentry-heroku'),
)
title = 'Heroku'
slug = 'heroku'
description = 'Integrate Heroku release tracking.'
version = sentry_heroku.VERSION
def get_release_doc_html(self, hook_url):
return """
<p>Add Sentry as a deploy hook to automatically track new releases.</p>
<pre class="clippy">heroku addons:create deployhooks:http --url={hook_url}</pre>
""".format(hook_url=hook_url)
def get_release_hook(self):
return HerokuReleaseHook
|
Add url and environment to payload
|
Add url and environment to payload
|
Python
|
apache-2.0
|
getsentry/sentry-heroku
|
import sentry_heroku
from sentry.plugins import ReleaseHook, ReleaseTrackingPlugin
class HerokuReleaseHook(ReleaseHook):
def handle(self, request):
self.finish_release(
version=request.POST['head_long'],
+ url=request.POST['url'],
+ environment=request.POST['app'],
)
class HerokuPlugin(ReleaseTrackingPlugin):
author = 'Sentry Team'
author_url = 'https://github.com/getsentry'
resource_links = (
('Bug Tracker', 'https://github.com/getsentry/sentry-heroku/issues'),
('Source', 'https://github.com/getsentry/sentry-heroku'),
)
title = 'Heroku'
slug = 'heroku'
description = 'Integrate Heroku release tracking.'
version = sentry_heroku.VERSION
def get_release_doc_html(self, hook_url):
return """
<p>Add Sentry as a deploy hook to automatically track new releases.</p>
<pre class="clippy">heroku addons:create deployhooks:http --url={hook_url}</pre>
""".format(hook_url=hook_url)
def get_release_hook(self):
return HerokuReleaseHook
|
Add url and environment to payload
|
## Code Before:
import sentry_heroku
from sentry.plugins import ReleaseHook, ReleaseTrackingPlugin
class HerokuReleaseHook(ReleaseHook):
def handle(self, request):
self.finish_release(
version=request.POST['head_long'],
)
class HerokuPlugin(ReleaseTrackingPlugin):
author = 'Sentry Team'
author_url = 'https://github.com/getsentry'
resource_links = (
('Bug Tracker', 'https://github.com/getsentry/sentry-heroku/issues'),
('Source', 'https://github.com/getsentry/sentry-heroku'),
)
title = 'Heroku'
slug = 'heroku'
description = 'Integrate Heroku release tracking.'
version = sentry_heroku.VERSION
def get_release_doc_html(self, hook_url):
return """
<p>Add Sentry as a deploy hook to automatically track new releases.</p>
<pre class="clippy">heroku addons:create deployhooks:http --url={hook_url}</pre>
""".format(hook_url=hook_url)
def get_release_hook(self):
return HerokuReleaseHook
## Instruction:
Add url and environment to payload
## Code After:
import sentry_heroku
from sentry.plugins import ReleaseHook, ReleaseTrackingPlugin
class HerokuReleaseHook(ReleaseHook):
def handle(self, request):
self.finish_release(
version=request.POST['head_long'],
url=request.POST['url'],
environment=request.POST['app'],
)
class HerokuPlugin(ReleaseTrackingPlugin):
author = 'Sentry Team'
author_url = 'https://github.com/getsentry'
resource_links = (
('Bug Tracker', 'https://github.com/getsentry/sentry-heroku/issues'),
('Source', 'https://github.com/getsentry/sentry-heroku'),
)
title = 'Heroku'
slug = 'heroku'
description = 'Integrate Heroku release tracking.'
version = sentry_heroku.VERSION
def get_release_doc_html(self, hook_url):
return """
<p>Add Sentry as a deploy hook to automatically track new releases.</p>
<pre class="clippy">heroku addons:create deployhooks:http --url={hook_url}</pre>
""".format(hook_url=hook_url)
def get_release_hook(self):
return HerokuReleaseHook
|
import sentry_heroku
from sentry.plugins import ReleaseHook, ReleaseTrackingPlugin
class HerokuReleaseHook(ReleaseHook):
def handle(self, request):
self.finish_release(
version=request.POST['head_long'],
+ url=request.POST['url'],
+ environment=request.POST['app'],
)
class HerokuPlugin(ReleaseTrackingPlugin):
author = 'Sentry Team'
author_url = 'https://github.com/getsentry'
resource_links = (
('Bug Tracker', 'https://github.com/getsentry/sentry-heroku/issues'),
('Source', 'https://github.com/getsentry/sentry-heroku'),
)
title = 'Heroku'
slug = 'heroku'
description = 'Integrate Heroku release tracking.'
version = sentry_heroku.VERSION
def get_release_doc_html(self, hook_url):
return """
<p>Add Sentry as a deploy hook to automatically track new releases.</p>
<pre class="clippy">heroku addons:create deployhooks:http --url={hook_url}</pre>
""".format(hook_url=hook_url)
def get_release_hook(self):
return HerokuReleaseHook
|
66e67e53360a9f49ae73c8c8f2de49991525363b
|
txircd/modules/cmode_t.py
|
txircd/modules/cmode_t.py
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
- if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
+ if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
## Code Before:
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
## Instruction:
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
## Code After:
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
- if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
? --------------------
+ if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
? ++++++++++++++++++++
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
853dc6b254c66807fd6c44b374c89b90069f55b5
|
Lib/test/test_startfile.py
|
Lib/test/test_startfile.py
|
import unittest
from test import test_support
import os
class TestCase(unittest.TestCase):
def test_nonexisting(self):
self.assertRaises(OSError, os.startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
self.assertRaises(OSError, os.startfile, u"nonexisting.vbs")
def test_empty(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
os.startfile(empty)
os.startfile(empty, "open")
def test_empty_u(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
os.startfile(unicode(empty, "mbcs"))
os.startfile(unicode(empty, "mbcs"), "open")
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
|
import unittest
from test import test_support
# use this form so that the test is skipped when startfile is not available:
from os import startfile
class TestCase(unittest.TestCase):
def test_nonexisting(self):
self.assertRaises(OSError, startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
self.assertRaises(OSError, startfile, u"nonexisting.vbs")
def test_empty(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
startfile(empty)
startfile(empty, "open")
def test_empty_u(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
startfile(unicode(empty, "mbcs"))
startfile(unicode(empty, "mbcs"), "open")
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
|
Change the import statement so that the test is skipped when os.startfile is not present.
|
Change the import statement so that the test is skipped when
os.startfile is not present.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
import unittest
from test import test_support
- import os
+ # use this form so that the test is skipped when startfile is not available:
+ from os import startfile
class TestCase(unittest.TestCase):
def test_nonexisting(self):
- self.assertRaises(OSError, os.startfile, "nonexisting.vbs")
+ self.assertRaises(OSError, startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
- self.assertRaises(OSError, os.startfile, u"nonexisting.vbs")
+ self.assertRaises(OSError, startfile, u"nonexisting.vbs")
def test_empty(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
- os.startfile(empty)
+ startfile(empty)
- os.startfile(empty, "open")
+ startfile(empty, "open")
def test_empty_u(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
- os.startfile(unicode(empty, "mbcs"))
+ startfile(unicode(empty, "mbcs"))
- os.startfile(unicode(empty, "mbcs"), "open")
+ startfile(unicode(empty, "mbcs"), "open")
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
|
Change the import statement so that the test is skipped when os.startfile is not present.
|
## Code Before:
import unittest
from test import test_support
import os
class TestCase(unittest.TestCase):
def test_nonexisting(self):
self.assertRaises(OSError, os.startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
self.assertRaises(OSError, os.startfile, u"nonexisting.vbs")
def test_empty(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
os.startfile(empty)
os.startfile(empty, "open")
def test_empty_u(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
os.startfile(unicode(empty, "mbcs"))
os.startfile(unicode(empty, "mbcs"), "open")
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
## Instruction:
Change the import statement so that the test is skipped when os.startfile is not present.
## Code After:
import unittest
from test import test_support
# use this form so that the test is skipped when startfile is not available:
from os import startfile
class TestCase(unittest.TestCase):
def test_nonexisting(self):
self.assertRaises(OSError, startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
self.assertRaises(OSError, startfile, u"nonexisting.vbs")
def test_empty(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
startfile(empty)
startfile(empty, "open")
def test_empty_u(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
startfile(unicode(empty, "mbcs"))
startfile(unicode(empty, "mbcs"), "open")
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
|
import unittest
from test import test_support
- import os
+ # use this form so that the test is skipped when startfile is not available:
+ from os import startfile
class TestCase(unittest.TestCase):
def test_nonexisting(self):
- self.assertRaises(OSError, os.startfile, "nonexisting.vbs")
? ---
+ self.assertRaises(OSError, startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
- self.assertRaises(OSError, os.startfile, u"nonexisting.vbs")
? ---
+ self.assertRaises(OSError, startfile, u"nonexisting.vbs")
def test_empty(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
- os.startfile(empty)
? ---
+ startfile(empty)
- os.startfile(empty, "open")
? ---
+ startfile(empty, "open")
def test_empty_u(self):
empty = os.path.join(os.path.dirname(__file__), "empty.vbs")
- os.startfile(unicode(empty, "mbcs"))
? ---
+ startfile(unicode(empty, "mbcs"))
- os.startfile(unicode(empty, "mbcs"), "open")
? ---
+ startfile(unicode(empty, "mbcs"), "open")
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
|
6c7ca64fbd93ab52dfc1ba792fd314395483d651
|
piazza_api/piazza.py
|
piazza_api/piazza.py
|
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
|
from .rpc import PiazzaRPC
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
self._rpc_api = None
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.user_login(email=email, password=password)
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.demo_login(auth=auth, url=url)
def network(self, network_id):
"""Returns Network instance for ``network_id``
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This can be found by visiting your class page
on Piazza's web UI and grabbing it from
https://piazza.com/class/{network_id}
"""
pass
|
Add login methods to Piazza
|
feat(user): Add login methods to Piazza
|
Python
|
mit
|
hfaran/piazza-api,kwangkim/piazza-api
|
+ from .rpc import PiazzaRPC
+
+
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
+ self._rpc_api = None
+
+ def user_login(self, email=None, password=None):
+ """Login with email, password and get back a session cookie
+
+ :type email: str
+ :param email: The email used for authentication
+ :type password: str
+ :param password: The password used for authentication
+ """
+ self._rpc_api = PiazzaRPC()
+ self._rpc_api.user_login(email=email, password=password)
+
+ def demo_login(self, auth=None, url=None):
+ """Authenticate with a "Share Your Class" URL using a demo user.
+
+ You may provide either the entire ``url`` or simply the ``auth``
+ parameter.
+
+ :param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
+ :param auth: Example - "06c111b"
+ """
+ self._rpc_api = PiazzaRPC()
+ self._rpc_api.demo_login(auth=auth, url=url)
+
+ def network(self, network_id):
+ """Returns Network instance for ``network_id``
+
+ :type nid: str
+ :param nid: This is the ID of the network (or class) from which
+ to query posts. This can be found by visiting your class page
+ on Piazza's web UI and grabbing it from
+ https://piazza.com/class/{network_id}
+ """
pass
|
Add login methods to Piazza
|
## Code Before:
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
pass
## Instruction:
Add login methods to Piazza
## Code After:
from .rpc import PiazzaRPC
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
self._rpc_api = None
def user_login(self, email=None, password=None):
"""Login with email, password and get back a session cookie
:type email: str
:param email: The email used for authentication
:type password: str
:param password: The password used for authentication
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.user_login(email=email, password=password)
def demo_login(self, auth=None, url=None):
"""Authenticate with a "Share Your Class" URL using a demo user.
You may provide either the entire ``url`` or simply the ``auth``
parameter.
:param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
:param auth: Example - "06c111b"
"""
self._rpc_api = PiazzaRPC()
self._rpc_api.demo_login(auth=auth, url=url)
def network(self, network_id):
"""Returns Network instance for ``network_id``
:type nid: str
:param nid: This is the ID of the network (or class) from which
to query posts. This can be found by visiting your class page
on Piazza's web UI and grabbing it from
https://piazza.com/class/{network_id}
"""
pass
|
+ from .rpc import PiazzaRPC
+
+
class Piazza(object):
"""Unofficial Client for Piazza's Internal API"""
def __init__(self):
+ self._rpc_api = None
+
+ def user_login(self, email=None, password=None):
+ """Login with email, password and get back a session cookie
+
+ :type email: str
+ :param email: The email used for authentication
+ :type password: str
+ :param password: The password used for authentication
+ """
+ self._rpc_api = PiazzaRPC()
+ self._rpc_api.user_login(email=email, password=password)
+
+ def demo_login(self, auth=None, url=None):
+ """Authenticate with a "Share Your Class" URL using a demo user.
+
+ You may provide either the entire ``url`` or simply the ``auth``
+ parameter.
+
+ :param url: Example - "https://piazza.com/demo_login?nid=hbj11a1gcvl1s6&auth=06c111b"
+ :param auth: Example - "06c111b"
+ """
+ self._rpc_api = PiazzaRPC()
+ self._rpc_api.demo_login(auth=auth, url=url)
+
+ def network(self, network_id):
+ """Returns Network instance for ``network_id``
+
+ :type nid: str
+ :param nid: This is the ID of the network (or class) from which
+ to query posts. This can be found by visiting your class page
+ on Piazza's web UI and grabbing it from
+ https://piazza.com/class/{network_id}
+ """
pass
|
0dddfcbdb46ac91ddc0bfed4482bce049a8593c2
|
lazyblacksmith/views/blueprint.py
|
lazyblacksmith/views/blueprint.py
|
from flask import Blueprint
from flask import render_template
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
regions = Region.query.filter_by(wh=False)
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
import config
from flask import Blueprint
from flask import render_template
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
regions = Region.query.filter(
Region.id.in_(config.CREST_REGION_PRICE)
).filter_by(
wh=False
)
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
Change region list to match config
|
Change region list to match config
|
Python
|
bsd-3-clause
|
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
|
+ import config
+
from flask import Blueprint
from flask import render_template
-
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
- regions = Region.query.filter_by(wh=False)
+ regions = Region.query.filter(
+ Region.id.in_(config.CREST_REGION_PRICE)
+ ).filter_by(
+ wh=False
+ )
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
Change region list to match config
|
## Code Before:
from flask import Blueprint
from flask import render_template
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
regions = Region.query.filter_by(wh=False)
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
## Instruction:
Change region list to match config
## Code After:
import config
from flask import Blueprint
from flask import render_template
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
regions = Region.query.filter(
Region.id.in_(config.CREST_REGION_PRICE)
).filter_by(
wh=False
)
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
+ import config
+
from flask import Blueprint
from flask import render_template
-
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
- regions = Region.query.filter_by(wh=False)
? --- ---------
+ regions = Region.query.filter(
+ Region.id.in_(config.CREST_REGION_PRICE)
+ ).filter_by(
+ wh=False
+ )
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
d63302f10bf9972680c189a25f995b713e72562f
|
demo/apps/catalogue/models.py
|
demo/apps/catalogue/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
Set name field on save
|
Set name field on save
|
Python
|
mit
|
pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
- FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
+ def save(self, *args, **kwargs):
+ self.name = self.title
+ super(Category, self).save(*args, **kwargs)
+
from oscar.apps.catalogue.models import * # noqa
|
Set name field on save
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
## Instruction:
Set name field on save
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
- FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
+ def save(self, *args, **kwargs):
+ self.name = self.title
+ super(Category, self).save(*args, **kwargs)
+
from oscar.apps.catalogue.models import * # noqa
|
fd6cc34c682c773273bcdd9d09d2f7f2e4d91700
|
ocr/tfhelpers.py
|
ocr/tfhelpers.py
|
import tensorflow as tf
class Graph():
""" Loading and running isolated tf graph """
def __init__(self, loc):
self.graph = tf.Graph()
self.sess = tf.Session(graph=self.graph)
with self.graph.as_default():
saver = tf.train.import_meta_graph(loc + '.meta', clear_devices=True)
saver.restore(self.sess, loc)
self.activation = tf.get_collection('activation')[0]
# To launch the graph
def run(self, data):
return self.sess.run(self.activation, feed_dict={"x:0": data})
|
import tensorflow as tf
class Graph():
""" Loading and running isolated tf graph """
def __init__(self, loc, operation='activation', input_name='x'):
"""
loc: location of file containing saved model
operation: name of operation for running the model
input_name: name of input placeholder
"""
self.input = input_name + ":0"
self.graph = tf.Graph()
self.sess = tf.Session(graph=self.graph)
with self.graph.as_default():
saver = tf.train.import_meta_graph(loc + '.meta', clear_devices=True)
saver.restore(self.sess, loc)
self.op = self.graph.get_operation_by_name(operation).outputs[0]
def run(self, data):
""" Run the specified operation on given data """
return self.sess.run(self.op, feed_dict={self.input: data})
|
Update Graph class for loading saved models Requires renaming operations in models -> re-train them
|
Update Graph class for loading saved models
Requires renaming operations in models -> re-train them
|
Python
|
mit
|
Breta01/handwriting-ocr
|
import tensorflow as tf
class Graph():
""" Loading and running isolated tf graph """
- def __init__(self, loc):
+ def __init__(self, loc, operation='activation', input_name='x'):
+ """
+ loc: location of file containing saved model
+ operation: name of operation for running the model
+ input_name: name of input placeholder
+ """
+ self.input = input_name + ":0"
self.graph = tf.Graph()
self.sess = tf.Session(graph=self.graph)
with self.graph.as_default():
saver = tf.train.import_meta_graph(loc + '.meta', clear_devices=True)
saver.restore(self.sess, loc)
- self.activation = tf.get_collection('activation')[0]
- # To launch the graph
+ self.op = self.graph.get_operation_by_name(operation).outputs[0]
+
def run(self, data):
+ """ Run the specified operation on given data """
- return self.sess.run(self.activation, feed_dict={"x:0": data})
+ return self.sess.run(self.op, feed_dict={self.input: data})
|
Update Graph class for loading saved models Requires renaming operations in models -> re-train them
|
## Code Before:
import tensorflow as tf
class Graph():
""" Loading and running isolated tf graph """
def __init__(self, loc):
self.graph = tf.Graph()
self.sess = tf.Session(graph=self.graph)
with self.graph.as_default():
saver = tf.train.import_meta_graph(loc + '.meta', clear_devices=True)
saver.restore(self.sess, loc)
self.activation = tf.get_collection('activation')[0]
# To launch the graph
def run(self, data):
return self.sess.run(self.activation, feed_dict={"x:0": data})
## Instruction:
Update Graph class for loading saved models Requires renaming operations in models -> re-train them
## Code After:
import tensorflow as tf
class Graph():
""" Loading and running isolated tf graph """
def __init__(self, loc, operation='activation', input_name='x'):
"""
loc: location of file containing saved model
operation: name of operation for running the model
input_name: name of input placeholder
"""
self.input = input_name + ":0"
self.graph = tf.Graph()
self.sess = tf.Session(graph=self.graph)
with self.graph.as_default():
saver = tf.train.import_meta_graph(loc + '.meta', clear_devices=True)
saver.restore(self.sess, loc)
self.op = self.graph.get_operation_by_name(operation).outputs[0]
def run(self, data):
""" Run the specified operation on given data """
return self.sess.run(self.op, feed_dict={self.input: data})
|
import tensorflow as tf
class Graph():
""" Loading and running isolated tf graph """
- def __init__(self, loc):
+ def __init__(self, loc, operation='activation', input_name='x'):
+ """
+ loc: location of file containing saved model
+ operation: name of operation for running the model
+ input_name: name of input placeholder
+ """
+ self.input = input_name + ":0"
self.graph = tf.Graph()
self.sess = tf.Session(graph=self.graph)
with self.graph.as_default():
saver = tf.train.import_meta_graph(loc + '.meta', clear_devices=True)
saver.restore(self.sess, loc)
- self.activation = tf.get_collection('activation')[0]
- # To launch the graph
+ self.op = self.graph.get_operation_by_name(operation).outputs[0]
+
def run(self, data):
+ """ Run the specified operation on given data """
- return self.sess.run(self.activation, feed_dict={"x:0": data})
? -------- ^ ^^^^^
+ return self.sess.run(self.op, feed_dict={self.input: data})
? ^ ^^^^^^^^^^
|
c3d22dd13bf56e65452e2e7d634c527d66e2a3b4
|
pyptools/objects.py
|
pyptools/objects.py
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses a iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yeilds parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a stirng. By default accumlates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumlates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses a iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yeilds parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a stirng. By default accumlates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumlates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
Fix a bug where splitlines was called twice for parse_file
|
Fix a bug where splitlines was called twice for parse_file
|
Python
|
mit
|
tandreas/pyptools
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses a iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yeilds parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a stirng. By default accumlates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumlates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
- return self.parse(value.read().splitlines(), **kwargs)
+ return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
Fix a bug where splitlines was called twice for parse_file
|
## Code Before:
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses a iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yeilds parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a stirng. By default accumlates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumlates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
## Instruction:
Fix a bug where splitlines was called twice for parse_file
## Code After:
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses a iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yeilds parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a stirng. By default accumlates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumlates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses a iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yeilds parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a stirng. By default accumlates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumlates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
- return self.parse(value.read().splitlines(), **kwargs)
? -------------
+ return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
886539f4bd3d67938f90b6500ee625db470284a2
|
UM/View/CompositePass.py
|
UM/View/CompositePass.py
|
from UM.Resources import Resources
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, name, width, height):
super().__init__(name, width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._shader.setUniformValue("u_layer_count", texture_unit + 1)
self._shader.setUniformValueArray("u_layers", [range(0, texture_unit)], texture_unit + 1)
self.renderQuad()
self._shader.release()
|
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
Make basic composite pass work
|
Make basic composite pass work
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
+ from UM.Application import Application
from UM.Resources import Resources
+
+ from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
- def __init__(self, name, width, height):
+ def __init__(self, width, height):
- super().__init__(name, width, height)
+ super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
+ self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
- for render_pass in renderer.getRenderPasses():
+ for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
+ self._renderer.renderQuad(self._shader)
- self._shader.setUniformValue("u_layer_count", texture_unit + 1)
- self._shader.setUniformValueArray("u_layers", [range(0, texture_unit)], texture_unit + 1)
- self.renderQuad()
+ for i in range(texture_unit):
+ self._gl.glActiveTexture(texture_unit)
+ self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
Make basic composite pass work
|
## Code Before:
from UM.Resources import Resources
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, name, width, height):
super().__init__(name, width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._shader.setUniformValue("u_layer_count", texture_unit + 1)
self._shader.setUniformValueArray("u_layers", [range(0, texture_unit)], texture_unit + 1)
self.renderQuad()
self._shader.release()
## Instruction:
Make basic composite pass work
## Code After:
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
+ from UM.Application import Application
from UM.Resources import Resources
+
+ from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
- def __init__(self, name, width, height):
? ------
+ def __init__(self, width, height):
- super().__init__(name, width, height)
? ^^
+ super().__init__("composite", width, height)
? ^^^ +++++ +
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
+ self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
- for render_pass in renderer.getRenderPasses():
+ for render_pass in self._renderer.getRenderPasses():
? ++++++
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
+ self._renderer.renderQuad(self._shader)
- self._shader.setUniformValue("u_layer_count", texture_unit + 1)
- self._shader.setUniformValueArray("u_layers", [range(0, texture_unit)], texture_unit + 1)
- self.renderQuad()
+ for i in range(texture_unit):
+ self._gl.glActiveTexture(texture_unit)
+ self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
fda1b41890ea338e992ddd8a23d9c6a497990ea2
|
fabfile/eg.py
|
fabfile/eg.py
|
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def quora():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('pip install spacy')
local('python -m spacy.en.download')
local('python examples/quora_similarity.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
Add fabric task for Quora example
|
Add fabric task for Quora example
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
+ def quora():
+ with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
+ local('pip install spacy')
+ local('python -m spacy.en.download')
+ local('python examples/quora_similarity.py')
+
+ @task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
Add fabric task for Quora example
|
## Code Before:
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
## Instruction:
Add fabric task for Quora example
## Code After:
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def quora():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('pip install spacy')
local('python -m spacy.en.download')
local('python examples/quora_similarity.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
+ def quora():
+ with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
+ local('pip install spacy')
+ local('python -m spacy.en.download')
+ local('python examples/quora_similarity.py')
+
+ @task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
94245d7a52a274c6763382a10e3a1dbe0b2cbf18
|
cea/interfaces/dashboard/api/dashboard.py
|
cea/interfaces/dashboard/api/dashboard.py
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario':
plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
for plot in d.plots]} for d in dashboards]
|
Allow 'scenario-name' to be null if it does not exist
|
Allow 'scenario-name' to be null if it does not exist
|
Python
|
mit
|
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
- 'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
+ 'plots': [{'title': plot.title, 'scenario':
+ plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
+ for plot in d.plots]} for d in dashboards]
|
Allow 'scenario-name' to be null if it does not exist
|
## Code Before:
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
## Instruction:
Allow 'scenario-name' to be null if it does not exist
## Code After:
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
# REST namespace that all dashboard endpoints attach to.
api = Namespace('Dashboard', description='Dashboard plots')

# Layout names the frontend understands; Dashboard.get falls back to 'row'
# for anything else.
LAYOUTS = ['row', 'grid', 'map']
# Static catalogue of available plots grouped by category.
# NOTE(review): this references cea.plots.categories, but only cea.config and
# cea.plots.cache are imported above — confirm the import exists in the file.
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
              for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
    def get(self):
        """
        Get Dashboards from yaml file
        """
        config = cea.config.Configuration()
        plot_cache = cea.plots.cache.PlotCache(config)
        dashboards = cea.plots.read_dashboards(config, plot_cache)

        out = []
        for dashboard in dashboards:
            # Unknown layout names degrade to the default 'row' layout.
            layout = dashboard.layout if dashboard.layout in LAYOUTS else 'row'
            plots = []
            for plot in dashboard.plots:
                # Not every plot is scoped to a scenario; report None then.
                if 'scenario-name' in plot.parameters.keys():
                    scenario = plot.parameters['scenario-name']
                else:
                    scenario = None
                plots.append({'title': plot.title, 'scenario': scenario})
            out.append({'name': dashboard.name,
                        'description': dashboard.description,
                        'layout': layout,
                        'plots': plots})
        return out
|
from flask_restplus import Namespace, Resource, fields, abort
import cea.config
import cea.plots.cache
api = Namespace('Dashboard', description='Dashboard plots')
LAYOUTS = ['row', 'grid', 'map']
CATEGORIES = {c.name: {'label': c.label, 'plots': [{'id': p.id(), 'name': p.name} for p in c.plots]}
for c in cea.plots.categories.list_categories()}
@api.route('/')
class Dashboard(Resource):
def get(self):
"""
Get Dashboards from yaml file
"""
config = cea.config.Configuration()
plot_cache = cea.plots.cache.PlotCache(config)
dashboards = cea.plots.read_dashboards(config, plot_cache)
return [{'name': d.name, 'description': d.description, 'layout': d.layout if d.layout in LAYOUTS else 'row',
- 'plots': [{'title': plot.title, 'scenario': plot.parameters['scenario-name']} for plot in d.plots]} for d in dashboards]
+ 'plots': [{'title': plot.title, 'scenario':
+ plot.parameters['scenario-name'] if 'scenario-name' in plot.parameters.keys() else None}
+ for plot in d.plots]} for d in dashboards]
|
dd1ed907532526a4a70694c46918136ca6d93277
|
nqueens/nqueens.py
|
nqueens/nqueens.py
|
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
board = Chessboard.create(8)
solver = Solver.create(board)
solution = solver.solve()
if solution is not None:
printer = Printer.create(solution)
printer.printBoard()
|
import os
import sys
import getopt
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
def main():
try:
n = parse_command_line()
except ValueError as e:
print("Error: " + str(e))
print("Usage: nqueens.py <n>")
sys.exit(1)
solution = solve_for(n)
if solution is None:
print("No solution found")
else:
print_solution(solution)
def parse_command_line():
try:
opts, args = getopt.getopt(sys.argv[1:], "", [])
except getopt.GetoptError:
raise ValueError("Could not parse command line")
if len(args) == 0:
raise ValueError("No arguments supplied")
if len(args) > 1:
raise ValueError("Too many arguments supplied")
n = args[0]
if not n.isdigit() or int(n) < 1:
raise ValueError("n must be a positive number")
return int(n)
def solve_for(n):
board = Chessboard.create(n)
solver = Solver.create(board)
return solver.solve()
def print_solution(solution):
printer = Printer.create(solution)
printer.printBoard()
if __name__ == '__main__':
sys.exit(main())
|
Add ability to run problems from command line
|
Add ability to run problems from command line
|
Python
|
mit
|
stevecshanks/nqueens
|
+ import os
+ import sys
+ import getopt
+ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
+ def main():
+ try:
+ n = parse_command_line()
+ except ValueError as e:
+ print("Error: " + str(e))
+ print("Usage: nqueens.py <n>")
+ sys.exit(1)
+ solution = solve_for(n)
+ if solution is None:
+ print("No solution found")
+ else:
+ print_solution(solution)
+
+
+ def parse_command_line():
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "", [])
+ except getopt.GetoptError:
+ raise ValueError("Could not parse command line")
+ if len(args) == 0:
+ raise ValueError("No arguments supplied")
+ if len(args) > 1:
+ raise ValueError("Too many arguments supplied")
+ n = args[0]
+ if not n.isdigit() or int(n) < 1:
+ raise ValueError("n must be a positive number")
+ return int(n)
+
+
+ def solve_for(n):
- board = Chessboard.create(8)
+ board = Chessboard.create(n)
- solver = Solver.create(board)
+ solver = Solver.create(board)
- solution = solver.solve()
- if solution is not None:
+ return solver.solve()
+
+
+ def print_solution(solution):
printer = Printer.create(solution)
printer.printBoard()
+
+ if __name__ == '__main__':
+ sys.exit(main())
+
|
Add ability to run problems from command line
|
## Code Before:
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
board = Chessboard.create(8)
solver = Solver.create(board)
solution = solver.solve()
if solution is not None:
printer = Printer.create(solution)
printer.printBoard()
## Instruction:
Add ability to run problems from command line
## Code After:
import os
import sys
import getopt
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
def main():
    """Entry point: parse the board size, solve, and report the result."""
    try:
        board_size = parse_command_line()
    except ValueError as err:
        print("Error: " + str(err))
        print("Usage: nqueens.py <n>")
        sys.exit(1)
    solution = solve_for(board_size)
    if solution is not None:
        print_solution(solution)
    else:
        print("No solution found")
def parse_command_line():
    """Read the board size n from sys.argv and return it as an int.

    Raises ValueError when the command line cannot be parsed, when no
    argument (or more than one) is supplied, or when n is not a positive
    integer.
    """
    try:
        _opts, positional = getopt.getopt(sys.argv[1:], "", [])
    except getopt.GetoptError:
        raise ValueError("Could not parse command line")
    if not positional:
        raise ValueError("No arguments supplied")
    if len(positional) > 1:
        raise ValueError("Too many arguments supplied")
    raw = positional[0]
    # isdigit() is checked first so int() is only applied to numeric text.
    if not (raw.isdigit() and int(raw) >= 1):
        raise ValueError("n must be a positive number")
    return int(raw)
def solve_for(n):
    """Build an n-by-n board and return a solution, or None if unsolvable."""
    return Solver.create(Chessboard.create(n)).solve()
def print_solution(solution):
    """Render the solved board to stdout."""
    Printer.create(solution).printBoard()
if __name__ == '__main__':
    # Propagate main()'s exit status to the shell.
    sys.exit(main())
|
+ import os
+ import sys
+ import getopt
+ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from nqueens.chessboard import Chessboard
from nqueens.printer import Printer
from nqueens.solver import Solver
+ def main():
+ try:
+ n = parse_command_line()
+ except ValueError as e:
+ print("Error: " + str(e))
+ print("Usage: nqueens.py <n>")
+ sys.exit(1)
+ solution = solve_for(n)
+ if solution is None:
+ print("No solution found")
+ else:
+ print_solution(solution)
+
+
+ def parse_command_line():
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "", [])
+ except getopt.GetoptError:
+ raise ValueError("Could not parse command line")
+ if len(args) == 0:
+ raise ValueError("No arguments supplied")
+ if len(args) > 1:
+ raise ValueError("Too many arguments supplied")
+ n = args[0]
+ if not n.isdigit() or int(n) < 1:
+ raise ValueError("n must be a positive number")
+ return int(n)
+
+
+ def solve_for(n):
- board = Chessboard.create(8)
? ^
+ board = Chessboard.create(n)
? ++++ ^
- solver = Solver.create(board)
+ solver = Solver.create(board)
? ++++
- solution = solver.solve()
- if solution is not None:
+ return solver.solve()
+
+
+ def print_solution(solution):
printer = Printer.create(solution)
printer.printBoard()
+
+
+ if __name__ == '__main__':
+ sys.exit(main())
|
6db982edae5e1cac2bc254651dd7264cd289130d
|
astropy/nddata/__init__.py
|
astropy/nddata/__init__.py
|
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
Add NDDataBase to package import
|
Add NDDataBase to package import
|
Python
|
bsd-3-clause
|
mhvk/astropy,stargaser/astropy,funbaker/astropy,aleksandr-bakanov/astropy,kelle/astropy,AustereCuriosity/astropy,funbaker/astropy,larrybradley/astropy,joergdietrich/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,MSeifert04/astropy,larrybradley/astropy,stargaser/astropy,bsipocz/astropy,joergdietrich/astropy,lpsinger/astropy,pllim/astropy,lpsinger/astropy,tbabej/astropy,dhomeier/astropy,joergdietrich/astropy,DougBurke/astropy,StuartLittlefair/astropy,stargaser/astropy,saimn/astropy,astropy/astropy,astropy/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,MSeifert04/astropy,astropy/astropy,dhomeier/astropy,funbaker/astropy,dhomeier/astropy,mhvk/astropy,funbaker/astropy,pllim/astropy,saimn/astropy,MSeifert04/astropy,saimn/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,joergdietrich/astropy,tbabej/astropy,mhvk/astropy,lpsinger/astropy,pllim/astropy,saimn/astropy,bsipocz/astropy,MSeifert04/astropy,AustereCuriosity/astropy,tbabej/astropy,larrybradley/astropy,StuartLittlefair/astropy,kelle/astropy,StuartLittlefair/astropy,joergdietrich/astropy,kelle/astropy,lpsinger/astropy,pllim/astropy,DougBurke/astropy,mhvk/astropy,larrybradley/astropy,astropy/astropy,DougBurke/astropy,pllim/astropy,stargaser/astropy,bsipocz/astropy,bsipocz/astropy,mhvk/astropy,kelle/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,tbabej/astropy,tbabej/astropy,saimn/astropy,DougBurke/astropy,astropy/astropy,lpsinger/astropy,AustereCuriosity/astropy,larrybradley/astropy,kelle/astropy
|
from .nddata import *
+ from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
Add NDDataBase to package import
|
## Code Before:
from .nddata import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
## Instruction:
Add NDDataBase to package import
## Code After:
from .nddata import *
from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
    """
    Configuration parameters for `astropy.nddata`.
    """

    # Warn when NDData arithmetic is performed with uncertainties that do
    # not support propagation of correlated uncertainties.
    warn_unsupported_correlated = _config.ConfigItem(
        True,
        'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
        'is performed with uncertainties and the uncertainties do not '
        'support the propagation of correlated uncertainties.'
    )

    # Warn when the `.unit` attribute is reassigned directly, since data
    # values/uncertainties are not rescaled to match the new unit.
    warn_setting_unit_directly = _config.ConfigItem(
        True,
        'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
        'attribute is changed from a non-``None`` value to another value '
        'that data values/uncertainties are not scaled with the unit change.'
    )

# Module-level singleton exposing the configuration namespace.
conf = Conf()
|
from .nddata import *
+ from .nddatabase import *
from .nduncertainty import *
from .flag_collection import *
from .decorators import *
from .arithmetic import *
from .. import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.nddata`.
"""
warn_unsupported_correlated = _config.ConfigItem(
True,
'Whether to issue a warning if `~astropy.nddata.NDData` arithmetic '
'is performed with uncertainties and the uncertainties do not '
'support the propagation of correlated uncertainties.'
)
warn_setting_unit_directly = _config.ConfigItem(
True,
'Whether to issue a warning when the `~astropy.nddata.NDData` unit '
'attribute is changed from a non-``None`` value to another value '
'that data values/uncertainties are not scaled with the unit change.'
)
conf = Conf()
|
7ff0e821b2d5e04f5d4edd198ae913a2e8e1da6e
|
micronota/db/test/test_tigrfam.py
|
micronota/db/test/test_tigrfam.py
|
from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata
class TigrfamTests(TestCase):
def setUp(self):
self.obs_db_fp = mktemp()
self.exp_db_fp = _get_data_dir()('tigrfam.db')
self.d = dirname(self.exp_db_fp)
def test_prepare_metadata(self):
prepare_metadata(self.d, self.obs_db_fp)
with open(self.obs_db_fp, 'rb') as o, open(self.exp_db_fp, 'rb') as e:
self.assertEqual(o.read(), e.read())
if __name__ == '__main__':
main()
|
from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
from sqlite3 import connect
from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata
class TigrfamTests(TestCase):
def setUp(self):
self.obs_db_fp = mktemp()
self.exp_db_fp = _get_data_dir()('tigrfam.db')
self.d = dirname(self.exp_db_fp)
def test_prepare_metadata(self):
prepare_metadata(self.d, self.obs_db_fp)
with connect(self.obs_db_fp) as o, connect(self.exp_db_fp) as e:
co = o.cursor()
co.execute('SELECT * from tigrfam')
ce = e.cursor()
ce.execute('SELECT * from tigrfam')
self.assertCountEqual(co.fetchall(), ce.fetchall())
if __name__ == '__main__':
main()
|
Update the equality test for database files
|
Update the equality test for database files
|
Python
|
bsd-3-clause
|
RNAer/micronota,tkosciol/micronota,mortonjt/micronota,mortonjt/micronota,RNAer/micronota,biocore/micronota,biocore/micronota,tkosciol/micronota
|
from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
+ from sqlite3 import connect
from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata
class TigrfamTests(TestCase):
def setUp(self):
self.obs_db_fp = mktemp()
self.exp_db_fp = _get_data_dir()('tigrfam.db')
self.d = dirname(self.exp_db_fp)
def test_prepare_metadata(self):
prepare_metadata(self.d, self.obs_db_fp)
- with open(self.obs_db_fp, 'rb') as o, open(self.exp_db_fp, 'rb') as e:
+ with connect(self.obs_db_fp) as o, connect(self.exp_db_fp) as e:
+ co = o.cursor()
+ co.execute('SELECT * from tigrfam')
+ ce = e.cursor()
+ ce.execute('SELECT * from tigrfam')
- self.assertEqual(o.read(), e.read())
+ self.assertCountEqual(co.fetchall(), ce.fetchall())
if __name__ == '__main__':
main()
|
Update the equality test for database files
|
## Code Before:
from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata
class TigrfamTests(TestCase):
def setUp(self):
self.obs_db_fp = mktemp()
self.exp_db_fp = _get_data_dir()('tigrfam.db')
self.d = dirname(self.exp_db_fp)
def test_prepare_metadata(self):
prepare_metadata(self.d, self.obs_db_fp)
with open(self.obs_db_fp, 'rb') as o, open(self.exp_db_fp, 'rb') as e:
self.assertEqual(o.read(), e.read())
if __name__ == '__main__':
main()
## Instruction:
Update the equality test for database files
## Code After:
from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
from sqlite3 import connect
from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata
class TigrfamTests(TestCase):
    """Checks that the generated tigrfam metadata db matches the reference."""

    def setUp(self):
        # Fresh path for the database the code under test will create.
        self.obs_db_fp = mktemp()
        self.exp_db_fp = _get_data_dir()('tigrfam.db')
        self.d = dirname(self.exp_db_fp)

    def test_prepare_metadata(self):
        prepare_metadata(self.d, self.obs_db_fp)
        # Compare table contents (order-insensitive) instead of raw file bytes.
        with connect(self.obs_db_fp) as obs_conn, connect(self.exp_db_fp) as exp_conn:
            query = 'SELECT * from tigrfam'
            observed = obs_conn.cursor().execute(query).fetchall()
            expected = exp_conn.cursor().execute(query).fetchall()
            self.assertCountEqual(observed, expected)
if __name__ == '__main__':
    # Allow running this test module directly with `python test_tigrfam.py`.
    main()
|
from tempfile import mktemp
from unittest import TestCase, main
from os.path import dirname
+ from sqlite3 import connect
from micronota.bfillings.util import _get_data_dir
from micronota.db.tigrfam import prepare_metadata
class TigrfamTests(TestCase):
def setUp(self):
self.obs_db_fp = mktemp()
self.exp_db_fp = _get_data_dir()('tigrfam.db')
self.d = dirname(self.exp_db_fp)
def test_prepare_metadata(self):
prepare_metadata(self.d, self.obs_db_fp)
- with open(self.obs_db_fp, 'rb') as o, open(self.exp_db_fp, 'rb') as e:
? ^ ^ ------ ^ ^ ------
+ with connect(self.obs_db_fp) as o, connect(self.exp_db_fp) as e:
? + ^^ ^^ + ^^ ^^
+ co = o.cursor()
+ co.execute('SELECT * from tigrfam')
+ ce = e.cursor()
+ ce.execute('SELECT * from tigrfam')
- self.assertEqual(o.read(), e.read())
? ^ ^ ^ ^
+ self.assertCountEqual(co.fetchall(), ce.fetchall())
? +++++ + ^ +++ ^^ + ^ +++ ^^
if __name__ == '__main__':
main()
|
e753038de039fd23f0d59bb0094f59fc73efe22b
|
flask_apscheduler/json.py
|
flask_apscheduler/json.py
|
import flask
import json
from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
|
import datetime
import flask
import json
from apscheduler.job import Job
from .utils import job_to_dict
loads = json.loads
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if isinstance(obj, datetime.date):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
|
Set a custom JSON Encoder to serialize date class.
|
Set a custom JSON Encoder to serialize date class.
|
Python
|
apache-2.0
|
viniciuschiele/flask-apscheduler
|
+ import datetime
import flask
import json
- from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict
+ loads = json.loads
-
- class JSONEncoder(json.JSONEncoder):
- def default(self, obj):
- if isinstance(obj, datetime):
- return obj.isoformat()
-
- if isinstance(obj, Job):
- return job_to_dict(obj)
-
- return super(JSONEncoder, self).default(obj)
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
+
+ class JSONEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, datetime.datetime):
+ return obj.isoformat()
+
+ if isinstance(obj, datetime.date):
+ return obj.isoformat()
+
+ if isinstance(obj, Job):
+ return job_to_dict(obj)
+
+ return super(JSONEncoder, self).default(obj)
+
|
Set a custom JSON Encoder to serialize date class.
|
## Code Before:
import flask
import json
from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
## Instruction:
Set a custom JSON Encoder to serialize date class.
## Code After:
import datetime
import flask
import json
from apscheduler.job import Job
from .utils import job_to_dict
# Re-export the stdlib deserializer; decoding needs no customisation.
loads = json.loads


def dumps(obj, indent=None):
    """Serialize *obj* to a JSON string using the scheduler-aware JSONEncoder."""
    return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
    """Build a Flask JSON response for *data* with an optional HTTP *status*."""
    indent = None
    # Pretty-print when the app requests it and the request is not AJAX.
    # NOTE(review): `flask.request.is_xhr` was deprecated/removed in newer
    # Flask/Werkzeug releases — confirm the supported Flask versions.
    if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
        indent = 2
    return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
class JSONEncoder(json.JSONEncoder):
    """JSON encoder that also handles datetime/date values and APScheduler jobs."""

    def default(self, obj):
        # datetime.datetime is a subclass of datetime.date; both serialize
        # to their ISO-8601 representation.
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()

        if isinstance(obj, Job):
            return job_to_dict(obj)

        return super(JSONEncoder, self).default(obj)
|
+ import datetime
import flask
import json
- from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict
+ loads = json.loads
-
- class JSONEncoder(json.JSONEncoder):
- def default(self, obj):
- if isinstance(obj, datetime):
- return obj.isoformat()
-
- if isinstance(obj, Job):
- return job_to_dict(obj)
-
- return super(JSONEncoder, self).default(obj)
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
+
+
+ class JSONEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, datetime.datetime):
+ return obj.isoformat()
+
+ if isinstance(obj, datetime.date):
+ return obj.isoformat()
+
+ if isinstance(obj, Job):
+ return job_to_dict(obj)
+
+ return super(JSONEncoder, self).default(obj)
|
ad42da9cb3c944f5bd5e953f947a0be96a4b8e17
|
astropy/samp/tests/test_hub_proxy.py
|
astropy/samp/tests/test_hub_proxy.py
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmpdir):
lockfile = tmpdir.join('.samptest').realpath().strpath
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmp_path):
lockfile = str(tmp_path / '.samptest')
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
Python
|
bsd-3-clause
|
pllim/astropy,mhvk/astropy,lpsinger/astropy,lpsinger/astropy,mhvk/astropy,larrybradley/astropy,pllim/astropy,lpsinger/astropy,lpsinger/astropy,lpsinger/astropy,astropy/astropy,pllim/astropy,astropy/astropy,larrybradley/astropy,pllim/astropy,astropy/astropy,mhvk/astropy,larrybradley/astropy,larrybradley/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,pllim/astropy,mhvk/astropy,larrybradley/astropy
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
- def test_custom_lockfile(tmpdir):
+ def test_custom_lockfile(tmp_path):
- lockfile = tmpdir.join('.samptest').realpath().strpath
+ lockfile = str(tmp_path / '.samptest')
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
## Code Before:
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmpdir):
lockfile = tmpdir.join('.samptest').realpath().strpath
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
## Instruction:
Replace `tmpdir` with `tmp_path` in `samp` tests
## Code After:
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
    """Exercises SAMPHubProxy against a private in-process SAMP hub."""

    def setup_method(self, method):
        # One hub and one connected proxy per test, torn down afterwards.
        self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
        self.hub.start()

        self.proxy = SAMPHubProxy()
        self.proxy.connect(hub=self.hub, pool_size=1)

    def teardown_method(self, method):
        # Disconnect first (if still connected), then stop the hub.
        if self.proxy.is_connected:
            self.proxy.disconnect()
        self.hub.stop()

    def test_is_connected(self):
        # connect() in setup_method should leave the proxy connected.
        assert self.proxy.is_connected

    def test_disconnect(self):
        self.proxy.disconnect()

    def test_ping(self):
        # ping() should not raise while the hub is running.
        self.proxy.ping()

    def test_registration(self):
        # Register using the hub secret from the lockfile, then unregister
        # with the private key returned by the hub.
        result = self.proxy.register(self.proxy.lockfile["samp.secret"])
        self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmp_path):
    # A hub given an explicit lockfile path should start and accept a client.
    lockfile = str(tmp_path / '.samptest')

    hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
    hub.start()

    proxy = SAMPHubProxy()
    proxy.connect(hub=hub, pool_size=1)

    hub.stop()
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
- def test_custom_lockfile(tmpdir):
? ^^^
+ def test_custom_lockfile(tmp_path):
? ^^^^^
- lockfile = tmpdir.join('.samptest').realpath().strpath
+ lockfile = str(tmp_path / '.samptest')
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
dd63394499c7c629033e76afa0196dfe48547da2
|
corehq/messaging/smsbackends/tropo/models.py
|
corehq/messaging/smsbackends/tropo/models.py
|
from urllib import urlencode
from urllib2 import urlopen
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.sms.models import SQLSMSBackend
from dimagi.ext.couchdbkit import *
from corehq.messaging.smsbackends.tropo.forms import TropoBackendForm
from django.conf import settings
class SQLTropoBackend(SQLSMSBackend):
class Meta:
app_label = 'sms'
proxy = True
@classmethod
def get_available_extra_fields(cls):
return [
'messaging_token',
]
@classmethod
def get_api_id(cls):
return 'TROPO'
@classmethod
def get_generic_name(cls):
return "Tropo"
@classmethod
def get_form_class(cls):
return TropoBackendForm
def get_sms_rate_limit(self):
return 60
def send(self, msg, *args, **kwargs):
phone_number = clean_phone_number(msg.phone_number)
text = msg.text.encode('utf-8')
config = self.config
params = urlencode({
'action': 'create',
'token': config.messaging_token,
'numberToDial': phone_number,
'msg': text,
'_send_sms': 'true',
})
url = 'https://api.tropo.com/1.0/sessions?%s' % params
response = urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
return response
|
from urllib import urlencode
from urllib2 import urlopen
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.sms.models import SQLSMSBackend
from dimagi.ext.couchdbkit import *
from corehq.messaging.smsbackends.tropo.forms import TropoBackendForm
from django.conf import settings
class SQLTropoBackend(SQLSMSBackend):
class Meta:
app_label = 'sms'
proxy = True
@classmethod
def get_available_extra_fields(cls):
return [
'messaging_token',
]
@classmethod
def get_api_id(cls):
return 'TROPO'
@classmethod
def get_generic_name(cls):
return "Tropo"
@classmethod
def get_form_class(cls):
return TropoBackendForm
def get_sms_rate_limit(self):
return 60
@classmethod
def get_opt_in_keywords(cls):
return ['START']
@classmethod
def get_opt_out_keywords(cls):
return ['STOP']
def send(self, msg, *args, **kwargs):
phone_number = clean_phone_number(msg.phone_number)
text = msg.text.encode('utf-8')
config = self.config
params = urlencode({
'action': 'create',
'token': config.messaging_token,
'numberToDial': phone_number,
'msg': text,
'_send_sms': 'true',
})
url = 'https://api.tropo.com/1.0/sessions?%s' % params
response = urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
return response
|
Add opt in/out keywords for tropo
|
Add opt in/out keywords for tropo
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
from urllib import urlencode
from urllib2 import urlopen
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.sms.models import SQLSMSBackend
from dimagi.ext.couchdbkit import *
from corehq.messaging.smsbackends.tropo.forms import TropoBackendForm
from django.conf import settings
class SQLTropoBackend(SQLSMSBackend):
class Meta:
app_label = 'sms'
proxy = True
@classmethod
def get_available_extra_fields(cls):
return [
'messaging_token',
]
@classmethod
def get_api_id(cls):
return 'TROPO'
@classmethod
def get_generic_name(cls):
return "Tropo"
@classmethod
def get_form_class(cls):
return TropoBackendForm
def get_sms_rate_limit(self):
return 60
+ @classmethod
+ def get_opt_in_keywords(cls):
+ return ['START']
+
+ @classmethod
+ def get_opt_out_keywords(cls):
+ return ['STOP']
+
def send(self, msg, *args, **kwargs):
phone_number = clean_phone_number(msg.phone_number)
text = msg.text.encode('utf-8')
config = self.config
params = urlencode({
'action': 'create',
'token': config.messaging_token,
'numberToDial': phone_number,
'msg': text,
'_send_sms': 'true',
})
url = 'https://api.tropo.com/1.0/sessions?%s' % params
response = urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
return response
|
Add opt in/out keywords for tropo
|
## Code Before:
from urllib import urlencode
from urllib2 import urlopen
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.sms.models import SQLSMSBackend
from dimagi.ext.couchdbkit import *
from corehq.messaging.smsbackends.tropo.forms import TropoBackendForm
from django.conf import settings
class SQLTropoBackend(SQLSMSBackend):
class Meta:
app_label = 'sms'
proxy = True
@classmethod
def get_available_extra_fields(cls):
return [
'messaging_token',
]
@classmethod
def get_api_id(cls):
return 'TROPO'
@classmethod
def get_generic_name(cls):
return "Tropo"
@classmethod
def get_form_class(cls):
return TropoBackendForm
def get_sms_rate_limit(self):
return 60
def send(self, msg, *args, **kwargs):
phone_number = clean_phone_number(msg.phone_number)
text = msg.text.encode('utf-8')
config = self.config
params = urlencode({
'action': 'create',
'token': config.messaging_token,
'numberToDial': phone_number,
'msg': text,
'_send_sms': 'true',
})
url = 'https://api.tropo.com/1.0/sessions?%s' % params
response = urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
return response
## Instruction:
Add opt in/out keywords for tropo
## Code After:
from urllib import urlencode
from urllib2 import urlopen
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.sms.models import SQLSMSBackend
from dimagi.ext.couchdbkit import *
from corehq.messaging.smsbackends.tropo.forms import TropoBackendForm
from django.conf import settings
class SQLTropoBackend(SQLSMSBackend):
class Meta:
app_label = 'sms'
proxy = True
@classmethod
def get_available_extra_fields(cls):
return [
'messaging_token',
]
@classmethod
def get_api_id(cls):
return 'TROPO'
@classmethod
def get_generic_name(cls):
return "Tropo"
@classmethod
def get_form_class(cls):
return TropoBackendForm
def get_sms_rate_limit(self):
return 60
@classmethod
def get_opt_in_keywords(cls):
return ['START']
@classmethod
def get_opt_out_keywords(cls):
return ['STOP']
def send(self, msg, *args, **kwargs):
phone_number = clean_phone_number(msg.phone_number)
text = msg.text.encode('utf-8')
config = self.config
params = urlencode({
'action': 'create',
'token': config.messaging_token,
'numberToDial': phone_number,
'msg': text,
'_send_sms': 'true',
})
url = 'https://api.tropo.com/1.0/sessions?%s' % params
response = urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
return response
|
from urllib import urlencode
from urllib2 import urlopen
from corehq.apps.sms.util import clean_phone_number
from corehq.apps.sms.models import SQLSMSBackend
from dimagi.ext.couchdbkit import *
from corehq.messaging.smsbackends.tropo.forms import TropoBackendForm
from django.conf import settings
class SQLTropoBackend(SQLSMSBackend):
class Meta:
app_label = 'sms'
proxy = True
@classmethod
def get_available_extra_fields(cls):
return [
'messaging_token',
]
@classmethod
def get_api_id(cls):
return 'TROPO'
@classmethod
def get_generic_name(cls):
return "Tropo"
@classmethod
def get_form_class(cls):
return TropoBackendForm
def get_sms_rate_limit(self):
return 60
+ @classmethod
+ def get_opt_in_keywords(cls):
+ return ['START']
+
+ @classmethod
+ def get_opt_out_keywords(cls):
+ return ['STOP']
+
def send(self, msg, *args, **kwargs):
phone_number = clean_phone_number(msg.phone_number)
text = msg.text.encode('utf-8')
config = self.config
params = urlencode({
'action': 'create',
'token': config.messaging_token,
'numberToDial': phone_number,
'msg': text,
'_send_sms': 'true',
})
url = 'https://api.tropo.com/1.0/sessions?%s' % params
response = urlopen(url, timeout=settings.SMS_GATEWAY_TIMEOUT).read()
return response
|
959d20df781edb9f283f5317f50e8000f83e7ab6
|
tests/rules/test_no_such_file.py
|
tests/rules/test_no_such_file.py
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
Add `test_not_match` to `no_such_file` tests
|
Add `test_not_match` to `no_such_file` tests
|
Python
|
mit
|
manashmndl/thefuck,levythu/thefuck,qingying5810/thefuck,mlk/thefuck,vanita5/thefuck,artiya4u/thefuck,nvbn/thefuck,ostree/thefuck,lawrencebenson/thefuck,sekaiamber/thefuck,manashmndl/thefuck,thinkerchan/thefuck,princeofdarkness76/thefuck,subajat1/thefuck,PLNech/thefuck,lawrencebenson/thefuck,roth1002/thefuck,bigplus/thefuck,princeofdarkness76/thefuck,beni55/thefuck,zhangzhishan/thefuck,redreamality/thefuck,ostree/thefuck,NguyenHoaiNam/thefuck,hxddh/thefuck,BertieJim/thefuck,thesoulkiller/thefuck,bigplus/thefuck,mlk/thefuck,barneyElDinosaurio/thefuck,bugaevc/thefuck,Clpsplug/thefuck,mcarton/thefuck,SimenB/thefuck,MJerty/thefuck,levythu/thefuck,BertieJim/thefuck,vanita5/thefuck,Aeron/thefuck,thesoulkiller/thefuck,subajat1/thefuck,PLNech/thefuck,SimenB/thefuck,gogobebe2/thefuck,AntonChankin/thefuck,LawrenceHan/thefuck,mcarton/thefuck,AntonChankin/thefuck,mbbill/thefuck,Clpsplug/thefuck,LawrenceHan/thefuck,scorphus/thefuck,beni55/thefuck,MJerty/thefuck,scorphus/thefuck,thinkerchan/thefuck,redreamality/thefuck,nvbn/thefuck,roth1002/thefuck,hxddh/thefuck,barneyElDinosaurio/thefuck
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
+ @pytest.mark.parametrize('command', [
+ Command(script='mv foo bar/', stderr=""),
+ Command(script='mv foo bar/foo', stderr="mv: permission denied"),
+ ])
+ def test_not_match(command):
+ assert not match(command, None)
+
+
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
Add `test_not_match` to `no_such_file` tests
|
## Code Before:
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
## Instruction:
Add `test_not_match` to `no_such_file` tests
## Code After:
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
+ @pytest.mark.parametrize('command', [
+ Command(script='mv foo bar/', stderr=""),
+ Command(script='mv foo bar/foo', stderr="mv: permission denied"),
+ ])
+ def test_not_match(command):
+ assert not match(command, None)
+
+
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
877f59134c64f3c2e50436289b1cd676d471f66f
|
src/gramcore/features/tests/test_descriptors.py
|
src/gramcore/features/tests/test_descriptors.py
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_possitions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_possitions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
Add note in hog test doc string
|
Add note in hog test doc string
|
Python
|
mit
|
cpsaltis/pythogram-core
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
+
+ There are already enough tests in skimage for this, just adding so to
+ document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_possitions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
Add note in hog test doc string
|
## Code Before:
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_possitions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
## Instruction:
Add note in hog test doc string
## Code After:
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
There are already enough tests in skimage for this, just adding so to
document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_possitions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
"""Tests for module gramcore.features.descriptors"""
import numpy
from nose.tools import assert_equal
from gramcore.features import descriptors
def test_hog_size():
"""Create a fixture and check hog result size
+
+ There are already enough tests in skimage for this, just adding so to
+ document how many values are returned and why.
Creates a square array and inputs it to hog. For simplicity the
blocks and the cells are square. The calculated orientations are set to 9.
Based on these the result should include a number of values equal to::
block_possitions^2 * cells_per_block^2 * orientations
"""
pixels_per_cell = 9
cells_per_block = 8
orientations = 9
# double the size so to generate some blocks and initialize the array
arr_dim = 2 * pixels_per_cell * cells_per_block
arr = numpy.zeros((arr_dim, arr_dim))
parameters = {'data': [arr],
'orientations': orientations,
'pixels_per_cell': [pixels_per_cell, pixels_per_cell],
'cells_per_block': [cells_per_block, cells_per_block]}
results = descriptors.hog(parameters)
# calculate how many blocks fit in the array, basically how many
# sliding window positions are there
block_positions = (arr_dim / pixels_per_cell) - cells_per_block + 1
assert_equal(results.shape[0], block_positions**2 *\
cells_per_block**2 *\
orientations)
|
60ef934e3bef7c00fc2d1823901babb665a4888f
|
get_study_attachments.py
|
get_study_attachments.py
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
study_files = []
for key in bucket.objects.filter(Prefix=f'videoStream_{study_uuid}'):
study_files.append(key)
return study_files
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
Remove looping through items and appending them to list.
|
Remove looping through items and appending them to list.
|
Python
|
apache-2.0
|
CenterForOpenScience/lookit-api,pattisdr/lookit-api,pattisdr/lookit-api,pattisdr/lookit-api,CenterForOpenScience/lookit-api,CenterForOpenScience/lookit-api
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
-
bucket = s3.Bucket(BUCKET_NAME)
- study_files = []
- for key in bucket.objects.filter(Prefix=f'videoStream_{study_uuid}'):
+ return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
- study_files.append(key)
- return study_files
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
Remove looping through items and appending them to list.
|
## Code Before:
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
study_files = []
for key in bucket.objects.filter(Prefix=f'videoStream_{study_uuid}'):
study_files.append(key)
return study_files
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
## Instruction:
Remove looping through items and appending them to list.
## Code After:
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
-
bucket = s3.Bucket(BUCKET_NAME)
- study_files = []
- for key in bucket.objects.filter(Prefix=f'videoStream_{study_uuid}'):
? -- -- ^^^ -
+ return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
? ^^^
- study_files.append(key)
- return study_files
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
bcb383612625d9a59f9e5b4174e44700b26bd0e5
|
crosscompute/macros/security.py
|
crosscompute/macros/security.py
|
from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
if datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
|
from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
if expiration_datetime and datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
|
Support case when expiration_datetime is None
|
Support case when expiration_datetime is None
|
Python
|
mit
|
crosscompute/crosscompute,crosscompute/crosscompute,crosscompute/crosscompute,crosscompute/crosscompute
|
from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
- if datetime.now() > expiration_datetime:
+ if expiration_datetime and datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
|
Support case when expiration_datetime is None
|
## Code Before:
from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
if datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
## Instruction:
Support case when expiration_datetime is None
## Code After:
from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
if expiration_datetime and datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
|
from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
- if datetime.now() > expiration_datetime:
+ if expiration_datetime and datetime.now() > expiration_datetime:
? ++++++++++++++++++++++++
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
|
daac5f26c07045ad162a481d035359dd17227c91
|
Lib/test/test_imaplib.py
|
Lib/test/test_imaplib.py
|
from test_support import verify,verbose
import imaplib
import time
# We can check only that it successfully produces a result,
# not the correctness of the result itself, since the result
# depends on the timezone the machine is in.
timevalues = [2000000000, 2000000000.0, time.localtime(2000000000),
"18-May-2033 05:33:20 +0200"]
for t in timevalues:
imaplib.Time2Internaldate(t)
|
import imaplib
import time
# We can check only that it successfully produces a result,
# not the correctness of the result itself, since the result
# depends on the timezone the machine is in.
timevalues = [2000000000, 2000000000.0, time.localtime(2000000000),
"18-May-2033 05:33:20 +0200"]
for t in timevalues:
imaplib.Time2Internaldate(t)
|
Remove unused imports, clean up trailing whitespace.
|
Remove unused imports, clean up trailing whitespace.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
- from test_support import verify,verbose
import imaplib
import time
# We can check only that it successfully produces a result,
# not the correctness of the result itself, since the result
# depends on the timezone the machine is in.
timevalues = [2000000000, 2000000000.0, time.localtime(2000000000),
"18-May-2033 05:33:20 +0200"]
for t in timevalues:
imaplib.Time2Internaldate(t)
-
|
Remove unused imports, clean up trailing whitespace.
|
## Code Before:
from test_support import verify,verbose
import imaplib
import time
# We can check only that it successfully produces a result,
# not the correctness of the result itself, since the result
# depends on the timezone the machine is in.
timevalues = [2000000000, 2000000000.0, time.localtime(2000000000),
"18-May-2033 05:33:20 +0200"]
for t in timevalues:
imaplib.Time2Internaldate(t)
## Instruction:
Remove unused imports, clean up trailing whitespace.
## Code After:
import imaplib
import time
# We can check only that it successfully produces a result,
# not the correctness of the result itself, since the result
# depends on the timezone the machine is in.
timevalues = [2000000000, 2000000000.0, time.localtime(2000000000),
"18-May-2033 05:33:20 +0200"]
for t in timevalues:
imaplib.Time2Internaldate(t)
|
- from test_support import verify,verbose
import imaplib
import time
# We can check only that it successfully produces a result,
# not the correctness of the result itself, since the result
# depends on the timezone the machine is in.
timevalues = [2000000000, 2000000000.0, time.localtime(2000000000),
"18-May-2033 05:33:20 +0200"]
for t in timevalues:
imaplib.Time2Internaldate(t)
-
|
9d94a753c4824df210753996edaa9f7910df5fa8
|
tests/test_sample_app.py
|
tests/test_sample_app.py
|
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
client.get('/')
|
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
resp = client.get('/')
assert resp.status == 200
|
Check for status code of 200 in sample app.
|
Check for status code of 200 in sample app.
|
Python
|
apache-2.0
|
JingZhou0404/flask-bootstrap,scorpiovn/flask-bootstrap,suvorom/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,suvorom/flask-bootstrap,victorbjorklund/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,livepy/flask-bootstrap,victorbjorklund/flask-bootstrap,dingocuster/flask-bootstrap,Coxious/flask-bootstrap,Coxious/flask-bootstrap,vishnugonela/flask-bootstrap,moha24/flask-bootstrap,eshijia/flask-bootstrap,dingocuster/flask-bootstrap,victorbjorklund/flask-bootstrap,eshijia/flask-bootstrap,vishnugonela/flask-bootstrap,JingZhou0404/flask-bootstrap,ser/flask-bootstrap,scorpiovn/flask-bootstrap,vishnugonela/flask-bootstrap,livepy/flask-bootstrap,JingZhou0404/flask-bootstrap,dingocuster/flask-bootstrap,livepy/flask-bootstrap,moha24/flask-bootstrap,Coxious/flask-bootstrap,BeardedSteve/flask-bootstrap,eshijia/flask-bootstrap,suvorom/flask-bootstrap,scorpiovn/flask-bootstrap,moha24/flask-bootstrap
|
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
- client.get('/')
+ resp = client.get('/')
+ assert resp.status == 200
|
Check for status code of 200 in sample app.
|
## Code Before:
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
client.get('/')
## Instruction:
Check for status code of 200 in sample app.
## Code After:
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
resp = client.get('/')
assert resp.status == 200
|
import pytest
@pytest.fixture
def app():
import sys
sys.path.append('.')
from sample_app import create_app
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
def test_index(client):
- client.get('/')
+ resp = client.get('/')
? +++++++
+ assert resp.status == 200
|
7c38eae5a07e07789713baf5ab3aaea772e76422
|
routes.py
|
routes.py
|
from flask import Flask, render_template, redirect
import psycopg2
import os
import urlparse
app = Flask(__name__)
# def connectDB(wrapped):
# def inner(*args, **kwargs):
# api_token = os.environ["API_TOKEN"]
# urlparse.uses_netloc.append("postgres")
# url = urlparse.urlparse(os.environ["DATABASE_URL"])
# conn = psycopg2.connect(
# database=url.path[1:],
# user=url.username,
# password=url.password,
# host=url.hostname,
# port=url.port
# )
# cur = conn.cursor()
# ret = wrapped(*args, **kwargs)
# return ret
# return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
# @connectDB
def participants():
return render_template('participants.html')
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
# @connectDB
def complete():
return redirect('/')
|
from flask import Flask, render_template, redirect, request
import psycopg2
from functools import wraps
import os
import urlparse
app = Flask(__name__)
def connectDB(wrapped):
@wraps(wrapped)
def inner(*args, **kwargs):
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
return wrapped(cur, *args, **kwargs)
return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
@connectDB
def participants(*args):
return args[0]
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
@connectDB
def complete(*args):
return render_template('/success.html')
|
Add decorator to connect to database
|
Add decorator to connect to database
|
Python
|
mit
|
AlexMathew/csipy-home
|
- from flask import Flask, render_template, redirect
+ from flask import Flask, render_template, redirect, request
import psycopg2
+ from functools import wraps
import os
import urlparse
+
app = Flask(__name__)
- # def connectDB(wrapped):
+ def connectDB(wrapped):
+ @wraps(wrapped)
- # def inner(*args, **kwargs):
+ def inner(*args, **kwargs):
- # api_token = os.environ["API_TOKEN"]
- # urlparse.uses_netloc.append("postgres")
+ urlparse.uses_netloc.append("postgres")
- # url = urlparse.urlparse(os.environ["DATABASE_URL"])
+ url = urlparse.urlparse(os.environ["DATABASE_URL"])
- # conn = psycopg2.connect(
+ conn = psycopg2.connect(
- # database=url.path[1:],
+ database=url.path[1:],
- # user=url.username,
+ user=url.username,
- # password=url.password,
+ password=url.password,
- # host=url.hostname,
+ host=url.hostname,
- # port=url.port
+ port=url.port
- # )
+ )
- # cur = conn.cursor()
+ cur = conn.cursor()
- # ret = wrapped(*args, **kwargs)
+ return wrapped(cur, *args, **kwargs)
- # return ret
- # return inner
+ return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
- # @connectDB
+ @connectDB
- def participants():
+ def participants(*args):
- return render_template('participants.html')
+ return args[0]
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
- # @connectDB
+ @connectDB
- def complete():
+ def complete(*args):
- return redirect('/')
+ return render_template('/success.html')
|
Add decorator to connect to database
|
## Code Before:
from flask import Flask, render_template, redirect
import psycopg2
import os
import urlparse
app = Flask(__name__)
# def connectDB(wrapped):
# def inner(*args, **kwargs):
# api_token = os.environ["API_TOKEN"]
# urlparse.uses_netloc.append("postgres")
# url = urlparse.urlparse(os.environ["DATABASE_URL"])
# conn = psycopg2.connect(
# database=url.path[1:],
# user=url.username,
# password=url.password,
# host=url.hostname,
# port=url.port
# )
# cur = conn.cursor()
# ret = wrapped(*args, **kwargs)
# return ret
# return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
# @connectDB
def participants():
return render_template('participants.html')
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
# @connectDB
def complete():
return redirect('/')
## Instruction:
Add decorator to connect to database
## Code After:
from flask import Flask, render_template, redirect, request
import psycopg2
from functools import wraps
import os
import urlparse
app = Flask(__name__)
def connectDB(wrapped):
@wraps(wrapped)
def inner(*args, **kwargs):
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
return wrapped(cur, *args, **kwargs)
return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
@connectDB
def participants(*args):
return args[0]
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
@connectDB
def complete(*args):
return render_template('/success.html')
|
- from flask import Flask, render_template, redirect
+ from flask import Flask, render_template, redirect, request
? +++++++++
import psycopg2
+ from functools import wraps
import os
import urlparse
+
app = Flask(__name__)
- # def connectDB(wrapped):
? --
+ def connectDB(wrapped):
+ @wraps(wrapped)
- # def inner(*args, **kwargs):
? --
+ def inner(*args, **kwargs):
- # api_token = os.environ["API_TOKEN"]
- # urlparse.uses_netloc.append("postgres")
? --
+ urlparse.uses_netloc.append("postgres")
- # url = urlparse.urlparse(os.environ["DATABASE_URL"])
? --
+ url = urlparse.urlparse(os.environ["DATABASE_URL"])
- # conn = psycopg2.connect(
? --
+ conn = psycopg2.connect(
- # database=url.path[1:],
? --
+ database=url.path[1:],
- # user=url.username,
? --
+ user=url.username,
- # password=url.password,
? --
+ password=url.password,
- # host=url.hostname,
? --
+ host=url.hostname,
- # port=url.port
? --
+ port=url.port
- # )
? --
+ )
- # cur = conn.cursor()
? --
+ cur = conn.cursor()
- # ret = wrapped(*args, **kwargs)
? -- ^^
+ return wrapped(cur, *args, **kwargs)
? ^^^ +++++
- # return ret
- # return inner
? --
+ return inner
@app.route('/')
def home():
return render_template('home.html')
@app.route('/participants')
- # @connectDB
? --
+ @connectDB
- def participants():
+ def participants(*args):
? +++++
- return render_template('participants.html')
+ return args[0]
@app.route('/setup')
def setup():
return render_template('setup.html')
@app.route('/register')
def register():
return render_template('register.html')
@app.route('/complete', methods=['POST'])
- # @connectDB
? --
+ @connectDB
- def complete():
+ def complete(*args):
? +++++
- return redirect('/')
+ return render_template('/success.html')
|
be2cd54386c0fb9c407ac5dc7da467547b0b426e
|
aldryn_apphooks_config/utils.py
|
aldryn_apphooks_config/utils.py
|
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve, Resolver404
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
if app and app.app_config:
try:
config = None
namespace = resolve(request.path).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
except Resolver404:
pass
return '', None
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
Add checks to get_app_instance to avoid Resolver404 even if namespace does not exists
|
Add checks to get_app_instance to avoid Resolver404 even if namespace does not exists
|
Python
|
bsd-3-clause
|
aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config
|
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
- from django.core.urlresolvers import resolve
+ from django.core.urlresolvers import resolve, Resolver404
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
- config = None
- namespace = resolve(request.path_info).namespace
if app and app.app_config:
+ try:
+ config = None
+ namespace = resolve(request.path).namespace
+ if app and app.app_config:
- config = app.get_config(namespace)
+ config = app.get_config(namespace)
- return namespace, config
+ return namespace, config
+ except Resolver404:
+ pass
+ return '', None
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
Add checks to get_app_instance to avoid Resolver404 even if namespace does not exists
|
## Code Before:
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
config = None
namespace = resolve(request.path_info).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
## Instruction:
Add checks to get_app_instance to avoid Resolver404 even if namespace does not exists
## Code After:
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
from django.core.urlresolvers import resolve, Resolver404
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
if app and app.app_config:
try:
config = None
namespace = resolve(request.path).namespace
if app and app.app_config:
config = app.get_config(namespace)
return namespace, config
except Resolver404:
pass
return '', None
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
from app_data import AppDataContainer, app_registry
from cms.apphook_pool import apphook_pool
- from django.core.urlresolvers import resolve
+ from django.core.urlresolvers import resolve, Resolver404
? +++++++++++++
def get_app_instance(request):
"""
Returns a tuple containing the current namespace and the AppHookConfig instance
:param request: request object
:return: namespace, config
"""
app = None
if getattr(request, 'current_page', None):
app = apphook_pool.get_apphook(request.current_page.application_urls)
- config = None
- namespace = resolve(request.path_info).namespace
if app and app.app_config:
+ try:
+ config = None
+ namespace = resolve(request.path).namespace
+ if app and app.app_config:
- config = app.get_config(namespace)
+ config = app.get_config(namespace)
? ++++++++
- return namespace, config
+ return namespace, config
? ++++++++
+ except Resolver404:
+ pass
+ return '', None
def setup_config(form_class, config_model):
"""
Register the provided form as config form for the provided config model
:param form_class: Form class derived from AppDataForm
:param config_model: Model class derived from AppHookConfig
:return:
"""
app_registry.register('config', AppDataContainer.from_form(form_class), config_model)
|
989abdc718973551bbb3565859d75ea0408776d0
|
example_project/example_project/urls.py
|
example_project/example_project/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", include(admin.site.urls)),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
Fix URLconf for example project.
|
Fix URLconf for example project.
|
Python
|
mit
|
zsiciarz/django-pgallery,zsiciarz/django-pgallery
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
- url(r"^admin/", include(admin.site.urls)),
+ url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
+ url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
Fix URLconf for example project.
|
## Code Before:
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", include(admin.site.urls)),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
## Instruction:
Fix URLconf for example project.
## Code After:
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.static import serve
urlpatterns = [
# Examples:
# url(r'^$', 'example_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
- url(r"^admin/", include(admin.site.urls)),
? -------- -
+ url(r"^admin/", admin.site.urls),
url(r"^media/(?P<path>.*)$", serve, {"document_root": settings.MEDIA_ROOT}),
+ url(r"^markitup/", include("markitup.urls")),
url(r"", include("pgallery.urls", namespace="pgallery")),
]
|
b09c7d7c8a0949e6c0a370e7608e51b1060b1ee8
|
meinberlin/apps/mapideas/forms.py
|
meinberlin/apps/mapideas/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from adhocracy4.categories.forms import CategorizableFieldMixin
from adhocracy4.maps import widgets as maps_widgets
from . import models
class MapIdeaForm(CategorizableFieldMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.settings = kwargs.pop('settings_instance')
super().__init__(*args, **kwargs)
self.fields['point'].widget = maps_widgets.MapChoosePointWidget(
polygon=self.settings.polygon)
self.fields['point'].error_messages['required'] = _(
'Please locate your proposal on the map.')
class Meta:
model = models.MapIdea
fields = ['name', 'description', 'category', 'point', 'point_label']
class MapIdeaModerateForm(forms.ModelForm):
class Meta:
model = models.MapIdea
fields = ['moderator_feedback']
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from adhocracy4.categories.forms import CategorizableFieldMixin
from adhocracy4.maps import widgets as maps_widgets
from . import models
class MapIdeaForm(CategorizableFieldMixin, forms.ModelForm):
class Media:
js = ('js/select_dropdown_init.js',)
def __init__(self, *args, **kwargs):
self.settings = kwargs.pop('settings_instance')
super().__init__(*args, **kwargs)
self.fields['point'].widget = maps_widgets.MapChoosePointWidget(
polygon=self.settings.polygon)
self.fields['point'].error_messages['required'] = _(
'Please locate your proposal on the map.')
class Meta:
model = models.MapIdea
fields = ['name', 'description', 'category', 'point', 'point_label']
class MapIdeaModerateForm(forms.ModelForm):
class Meta:
model = models.MapIdea
fields = ['moderator_feedback']
|
Initialize select dropdown for categories with maps
|
Initialize select dropdown for categories with maps
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from adhocracy4.categories.forms import CategorizableFieldMixin
from adhocracy4.maps import widgets as maps_widgets
from . import models
class MapIdeaForm(CategorizableFieldMixin, forms.ModelForm):
+ class Media:
+ js = ('js/select_dropdown_init.js',)
def __init__(self, *args, **kwargs):
self.settings = kwargs.pop('settings_instance')
super().__init__(*args, **kwargs)
self.fields['point'].widget = maps_widgets.MapChoosePointWidget(
polygon=self.settings.polygon)
self.fields['point'].error_messages['required'] = _(
'Please locate your proposal on the map.')
class Meta:
model = models.MapIdea
fields = ['name', 'description', 'category', 'point', 'point_label']
class MapIdeaModerateForm(forms.ModelForm):
class Meta:
model = models.MapIdea
fields = ['moderator_feedback']
|
Initialize select dropdown for categories with maps
|
## Code Before:
from django import forms
from django.utils.translation import ugettext_lazy as _
from adhocracy4.categories.forms import CategorizableFieldMixin
from adhocracy4.maps import widgets as maps_widgets
from . import models
class MapIdeaForm(CategorizableFieldMixin, forms.ModelForm):
def __init__(self, *args, **kwargs):
self.settings = kwargs.pop('settings_instance')
super().__init__(*args, **kwargs)
self.fields['point'].widget = maps_widgets.MapChoosePointWidget(
polygon=self.settings.polygon)
self.fields['point'].error_messages['required'] = _(
'Please locate your proposal on the map.')
class Meta:
model = models.MapIdea
fields = ['name', 'description', 'category', 'point', 'point_label']
class MapIdeaModerateForm(forms.ModelForm):
class Meta:
model = models.MapIdea
fields = ['moderator_feedback']
## Instruction:
Initialize select dropdown for categories with maps
## Code After:
from django import forms
from django.utils.translation import ugettext_lazy as _
from adhocracy4.categories.forms import CategorizableFieldMixin
from adhocracy4.maps import widgets as maps_widgets
from . import models
class MapIdeaForm(CategorizableFieldMixin, forms.ModelForm):
class Media:
js = ('js/select_dropdown_init.js',)
def __init__(self, *args, **kwargs):
self.settings = kwargs.pop('settings_instance')
super().__init__(*args, **kwargs)
self.fields['point'].widget = maps_widgets.MapChoosePointWidget(
polygon=self.settings.polygon)
self.fields['point'].error_messages['required'] = _(
'Please locate your proposal on the map.')
class Meta:
model = models.MapIdea
fields = ['name', 'description', 'category', 'point', 'point_label']
class MapIdeaModerateForm(forms.ModelForm):
class Meta:
model = models.MapIdea
fields = ['moderator_feedback']
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from adhocracy4.categories.forms import CategorizableFieldMixin
from adhocracy4.maps import widgets as maps_widgets
from . import models
class MapIdeaForm(CategorizableFieldMixin, forms.ModelForm):
+ class Media:
+ js = ('js/select_dropdown_init.js',)
def __init__(self, *args, **kwargs):
self.settings = kwargs.pop('settings_instance')
super().__init__(*args, **kwargs)
self.fields['point'].widget = maps_widgets.MapChoosePointWidget(
polygon=self.settings.polygon)
self.fields['point'].error_messages['required'] = _(
'Please locate your proposal on the map.')
class Meta:
model = models.MapIdea
fields = ['name', 'description', 'category', 'point', 'point_label']
class MapIdeaModerateForm(forms.ModelForm):
class Meta:
model = models.MapIdea
fields = ['moderator_feedback']
|
3dbc981e62c2d153913557b62083f60888fa7e83
|
ynr/apps/ynr_refactoring/management/commands/ynr_refactoring_remove_legacy_IDs.py
|
ynr/apps/ynr_refactoring/management/commands/ynr_refactoring_remove_legacy_IDs.py
|
import json
from django.core.management.base import BaseCommand
from django.db import transaction
from people.models import Person
from candidates.views.version_data import get_change_metadata
from popolo.models import Identifier
class Command(BaseCommand):
def handle(self, *args, **options):
schemes = ("yournextmp-candidate", "popit-person")
# We can't use the GFK any more because we just deleted it, but the
# content is still there
identifiers = Identifier.objects.filter(scheme__in=schemes).values_list(
"object_id", flat=True
)
for person in Person.objects.filter(pk__in=identifiers).filter(pk=502):
with transaction.atomic():
meta_data = get_change_metadata(
None, "Removing legacy identifiers"
)
meta_data["username"] = "CandidateBot"
person.record_version(meta_data)
person.save()
|
import json
from django.core.management.base import BaseCommand
from django.db import transaction
from people.models import Person
from candidates.views.version_data import get_change_metadata
from popolo.models import Identifier
class Command(BaseCommand):
def handle(self, *args, **options):
schemes = ("yournextmp-candidate", "popit-person")
# We can't use the GFK any more because we just deleted it, but the
# content is still there
identifiers = Identifier.objects.filter(scheme__in=schemes).values_list(
"object_id", flat=True
)
for person in Person.objects.filter(pk__in=identifiers):
with transaction.atomic():
meta_data = get_change_metadata(
None, "Removing legacy identifiers"
)
meta_data["username"] = "CandidateBot"
person.record_version(meta_data)
person.save()
|
Remove IDs for all candidates, not just Zac
|
Remove IDs for all candidates, not just Zac
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
import json
from django.core.management.base import BaseCommand
from django.db import transaction
from people.models import Person
from candidates.views.version_data import get_change_metadata
from popolo.models import Identifier
class Command(BaseCommand):
def handle(self, *args, **options):
schemes = ("yournextmp-candidate", "popit-person")
# We can't use the GFK any more because we just deleted it, but the
# content is still there
identifiers = Identifier.objects.filter(scheme__in=schemes).values_list(
"object_id", flat=True
)
- for person in Person.objects.filter(pk__in=identifiers).filter(pk=502):
+ for person in Person.objects.filter(pk__in=identifiers):
with transaction.atomic():
meta_data = get_change_metadata(
None, "Removing legacy identifiers"
)
meta_data["username"] = "CandidateBot"
person.record_version(meta_data)
person.save()
|
Remove IDs for all candidates, not just Zac
|
## Code Before:
import json
from django.core.management.base import BaseCommand
from django.db import transaction
from people.models import Person
from candidates.views.version_data import get_change_metadata
from popolo.models import Identifier
class Command(BaseCommand):
def handle(self, *args, **options):
schemes = ("yournextmp-candidate", "popit-person")
# We can't use the GFK any more because we just deleted it, but the
# content is still there
identifiers = Identifier.objects.filter(scheme__in=schemes).values_list(
"object_id", flat=True
)
for person in Person.objects.filter(pk__in=identifiers).filter(pk=502):
with transaction.atomic():
meta_data = get_change_metadata(
None, "Removing legacy identifiers"
)
meta_data["username"] = "CandidateBot"
person.record_version(meta_data)
person.save()
## Instruction:
Remove IDs for all candidates, not just Zac
## Code After:
import json
from django.core.management.base import BaseCommand
from django.db import transaction
from people.models import Person
from candidates.views.version_data import get_change_metadata
from popolo.models import Identifier
class Command(BaseCommand):
def handle(self, *args, **options):
schemes = ("yournextmp-candidate", "popit-person")
# We can't use the GFK any more because we just deleted it, but the
# content is still there
identifiers = Identifier.objects.filter(scheme__in=schemes).values_list(
"object_id", flat=True
)
for person in Person.objects.filter(pk__in=identifiers):
with transaction.atomic():
meta_data = get_change_metadata(
None, "Removing legacy identifiers"
)
meta_data["username"] = "CandidateBot"
person.record_version(meta_data)
person.save()
|
import json
from django.core.management.base import BaseCommand
from django.db import transaction
from people.models import Person
from candidates.views.version_data import get_change_metadata
from popolo.models import Identifier
class Command(BaseCommand):
def handle(self, *args, **options):
schemes = ("yournextmp-candidate", "popit-person")
# We can't use the GFK any more because we just deleted it, but the
# content is still there
identifiers = Identifier.objects.filter(scheme__in=schemes).values_list(
"object_id", flat=True
)
- for person in Person.objects.filter(pk__in=identifiers).filter(pk=502):
? ---------------
+ for person in Person.objects.filter(pk__in=identifiers):
with transaction.atomic():
meta_data = get_change_metadata(
None, "Removing legacy identifiers"
)
meta_data["username"] = "CandidateBot"
person.record_version(meta_data)
person.save()
|
830e1a23559b82af37f52657484edd20641318c5
|
teamsupport/__init__.py
|
teamsupport/__init__.py
|
from __future__ import absolute_import
from teamsupport.services import TeamSupportService
__author__ = 'Yola Engineers'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = (TeamSupportService,)
|
from __future__ import absolute_import
from teamsupport.models import Action, Ticket
from teamsupport.services import TeamSupportService
__author__ = 'Yola Engineers'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = (Action, TeamSupportService, Ticket,)
|
Add convenience imports for models
|
Add convenience imports for models
|
Python
|
mit
|
zoidbergwill/teamsupport-python,yola/teamsupport-python
|
from __future__ import absolute_import
+ from teamsupport.models import Action, Ticket
from teamsupport.services import TeamSupportService
__author__ = 'Yola Engineers'
__email__ = '[email protected]'
__version__ = '0.1.0'
- __all__ = (TeamSupportService,)
+ __all__ = (Action, TeamSupportService, Ticket,)
|
Add convenience imports for models
|
## Code Before:
from __future__ import absolute_import
from teamsupport.services import TeamSupportService
__author__ = 'Yola Engineers'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = (TeamSupportService,)
## Instruction:
Add convenience imports for models
## Code After:
from __future__ import absolute_import
from teamsupport.models import Action, Ticket
from teamsupport.services import TeamSupportService
__author__ = 'Yola Engineers'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = (Action, TeamSupportService, Ticket,)
|
from __future__ import absolute_import
+ from teamsupport.models import Action, Ticket
from teamsupport.services import TeamSupportService
__author__ = 'Yola Engineers'
__email__ = '[email protected]'
__version__ = '0.1.0'
- __all__ = (TeamSupportService,)
+ __all__ = (Action, TeamSupportService, Ticket,)
? ++++++++ ++++++++
|
a555737e2d594a67078a15be9d5eb3c8524d0698
|
app/models.py
|
app/models.py
|
from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)
|
from . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)
|
Add password hash to Monkey model
|
Add password hash to Monkey model
|
Python
|
mit
|
timzdevz/fm-flask-app
|
from . import db
+ from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
+ password_hash = db.Column(db.String(128))
- age = db.Column(db.Date())
+ birth_date = db.Column(db.Date())
+
+ @property
+ def password(self):
+ raise AttributeError('password is not a readable attribute')
+
+ @password.setter
+ def password(self, password):
+ self.password_hash = generate_password_hash(password)
+
+ def verify_password(self, password):
+ return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)
|
Add password hash to Monkey model
|
## Code Before:
from . import db
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
age = db.Column(db.Date())
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)
## Instruction:
Add password hash to Monkey model
## Code After:
from . import db
from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
password_hash = db.Column(db.String(128))
birth_date = db.Column(db.Date())
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)
|
from . import db
+ from werkzeug.security import generate_password_hash, check_password_hash
class Monkey(db.Model):
__tablename__ = 'monkeys'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64))
email = db.Column(db.String(64), unique=True)
+ password_hash = db.Column(db.String(128))
- age = db.Column(db.Date())
? ^
+ birth_date = db.Column(db.Date())
? +++++++ ^
+
+ @property
+ def password(self):
+ raise AttributeError('password is not a readable attribute')
+
+ @password.setter
+ def password(self, password):
+ self.password_hash = generate_password_hash(password)
+
+ def verify_password(self, password):
+ return check_password_hash(self.password_hash, password)
def __repr__(self):
return '<User {} {}>'.format(self.id, self.email)
|
d2e82419a8f1b7ead32a43e6a03ebe8093374840
|
opps/channels/forms.py
|
opps/channels/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
class Meta:
model = Channel
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
def __init__(self, *args, **kwargs):
super(ChannelAdminForm, self).__init__(*args, **kwargs)
instance = getattr(self, 'instance', None)
if instance and instance.pk:
self.fields['slug'].widget.attrs['readonly'] = True
class Meta:
model = Channel
|
Set slug field readonly after channel create
|
Set slug field readonly after channel create
|
Python
|
mit
|
williamroot/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
+ def __init__(self, *args, **kwargs):
+ super(ChannelAdminForm, self).__init__(*args, **kwargs)
+ instance = getattr(self, 'instance', None)
+ if instance and instance.pk:
+ self.fields['slug'].widget.attrs['readonly'] = True
+
class Meta:
model = Channel
|
Set slug field readonly after channel create
|
## Code Before:
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
class Meta:
model = Channel
## Instruction:
Set slug field readonly after channel create
## Code After:
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
def __init__(self, *args, **kwargs):
super(ChannelAdminForm, self).__init__(*args, **kwargs)
instance = getattr(self, 'instance', None)
if instance and instance.pk:
self.fields['slug'].widget.attrs['readonly'] = True
class Meta:
model = Channel
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Channel
class ChannelAdminForm(forms.ModelForm):
layout = forms.ChoiceField(choices=(('default', _('Default')),))
+ def __init__(self, *args, **kwargs):
+ super(ChannelAdminForm, self).__init__(*args, **kwargs)
+ instance = getattr(self, 'instance', None)
+ if instance and instance.pk:
+ self.fields['slug'].widget.attrs['readonly'] = True
+
class Meta:
model = Channel
|
3418b1ef4ade19ccddef92ec059d1629969d8cef
|
src/lander/ext/parser/_parser.py
|
src/lander/ext/parser/_parser.py
|
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
from lander.ext.parser._datamodel import DocumentMetadata
from lander.ext.parser.texutils.normalize import read_tex_file
if TYPE_CHECKING:
from pathlib import Path
__all__ = ["Parser"]
class Parser(metaclass=ABCMeta):
"""Base class for TeX document metadata parsing extensions.
Parameters
----------
tex_path
Path to the root tex document.
"""
def __init__(self, tex_path: Path) -> None:
self._tex_path = tex_path
self._tex_source = read_tex_file(self.tex_path)
self._metadata = self.extract_metadata(self.tex_source)
@property
def tex_path(self) -> Path:
""""Path to the root TeX source file."""
return self._tex_path
@property
def tex_source(self) -> str:
"""TeX source, which has been normalized."""
return self._tex_source
@property
def metadata(self) -> DocumentMetadata:
"""Metadata about the document."""
return self._metadata
@abstractmethod
def extract_metadata(self, tex_source: str) -> DocumentMetadata:
"""Hook for implementing metadata extraction."""
raise NotImplementedError
|
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
from lander.ext.parser._datamodel import DocumentMetadata
from lander.ext.parser.texutils.extract import get_macros
from lander.ext.parser.texutils.normalize import read_tex_file, replace_macros
if TYPE_CHECKING:
from pathlib import Path
__all__ = ["Parser"]
class Parser(metaclass=ABCMeta):
"""Base class for TeX document metadata parsing extensions.
Parameters
----------
tex_path
Path to the root tex document.
"""
def __init__(self, tex_path: Path) -> None:
self._tex_path = tex_path
self._tex_source = self.normalize_source(read_tex_file(self.tex_path))
self._metadata = self.extract_metadata(self.tex_source)
@property
def tex_path(self) -> Path:
""""Path to the root TeX source file."""
return self._tex_path
@property
def tex_source(self) -> str:
"""TeX source, which has been normalized."""
return self._tex_source
@property
def metadata(self) -> DocumentMetadata:
"""Metadata about the document."""
return self._metadata
def normalize_source(self, tex_source: str) -> str:
"""Process the TeX source after it is read, but before metadata
is extracted.
Parameters
----------
tex_source
TeX source content.
Returns
-------
tex_source
Normalized TeX source content.
"""
macros = get_macros(tex_source)
return replace_macros(tex_source, macros)
@abstractmethod
def extract_metadata(self, tex_source: str) -> DocumentMetadata:
"""Hook for implementing metadata extraction.
Parameters
----------
tex_source
TeX source content.
Returns
-------
metadata
The metadata parsed from the document source.
"""
raise NotImplementedError
|
Add normalize_source hook for parsers
|
Add normalize_source hook for parsers
By default, this hook will replace macros (such as \newcommand) with
their content. Parser implementations can do additional work to
normalize/resolve TeX content.
|
Python
|
mit
|
lsst-sqre/lander,lsst-sqre/lander
|
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
from lander.ext.parser._datamodel import DocumentMetadata
+ from lander.ext.parser.texutils.extract import get_macros
- from lander.ext.parser.texutils.normalize import read_tex_file
+ from lander.ext.parser.texutils.normalize import read_tex_file, replace_macros
if TYPE_CHECKING:
from pathlib import Path
__all__ = ["Parser"]
class Parser(metaclass=ABCMeta):
"""Base class for TeX document metadata parsing extensions.
Parameters
----------
tex_path
Path to the root tex document.
"""
def __init__(self, tex_path: Path) -> None:
self._tex_path = tex_path
- self._tex_source = read_tex_file(self.tex_path)
+ self._tex_source = self.normalize_source(read_tex_file(self.tex_path))
self._metadata = self.extract_metadata(self.tex_source)
@property
def tex_path(self) -> Path:
""""Path to the root TeX source file."""
return self._tex_path
@property
def tex_source(self) -> str:
"""TeX source, which has been normalized."""
return self._tex_source
@property
def metadata(self) -> DocumentMetadata:
"""Metadata about the document."""
return self._metadata
+ def normalize_source(self, tex_source: str) -> str:
+ """Process the TeX source after it is read, but before metadata
+ is extracted.
+
+ Parameters
+ ----------
+ tex_source
+ TeX source content.
+
+ Returns
+ -------
+ tex_source
+ Normalized TeX source content.
+ """
+ macros = get_macros(tex_source)
+ return replace_macros(tex_source, macros)
+
@abstractmethod
def extract_metadata(self, tex_source: str) -> DocumentMetadata:
- """Hook for implementing metadata extraction."""
+ """Hook for implementing metadata extraction.
+
+ Parameters
+ ----------
+ tex_source
+ TeX source content.
+
+ Returns
+ -------
+ metadata
+ The metadata parsed from the document source.
+ """
raise NotImplementedError
|
Add normalize_source hook for parsers
|
## Code Before:
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
from lander.ext.parser._datamodel import DocumentMetadata
from lander.ext.parser.texutils.normalize import read_tex_file
if TYPE_CHECKING:
from pathlib import Path
__all__ = ["Parser"]
class Parser(metaclass=ABCMeta):
"""Base class for TeX document metadata parsing extensions.
Parameters
----------
tex_path
Path to the root tex document.
"""
def __init__(self, tex_path: Path) -> None:
self._tex_path = tex_path
self._tex_source = read_tex_file(self.tex_path)
self._metadata = self.extract_metadata(self.tex_source)
@property
def tex_path(self) -> Path:
""""Path to the root TeX source file."""
return self._tex_path
@property
def tex_source(self) -> str:
"""TeX source, which has been normalized."""
return self._tex_source
@property
def metadata(self) -> DocumentMetadata:
"""Metadata about the document."""
return self._metadata
@abstractmethod
def extract_metadata(self, tex_source: str) -> DocumentMetadata:
"""Hook for implementing metadata extraction."""
raise NotImplementedError
## Instruction:
Add normalize_source hook for parsers
## Code After:
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
from lander.ext.parser._datamodel import DocumentMetadata
from lander.ext.parser.texutils.extract import get_macros
from lander.ext.parser.texutils.normalize import read_tex_file, replace_macros
if TYPE_CHECKING:
from pathlib import Path
__all__ = ["Parser"]
class Parser(metaclass=ABCMeta):
"""Base class for TeX document metadata parsing extensions.
Parameters
----------
tex_path
Path to the root tex document.
"""
def __init__(self, tex_path: Path) -> None:
self._tex_path = tex_path
self._tex_source = self.normalize_source(read_tex_file(self.tex_path))
self._metadata = self.extract_metadata(self.tex_source)
@property
def tex_path(self) -> Path:
""""Path to the root TeX source file."""
return self._tex_path
@property
def tex_source(self) -> str:
"""TeX source, which has been normalized."""
return self._tex_source
@property
def metadata(self) -> DocumentMetadata:
"""Metadata about the document."""
return self._metadata
def normalize_source(self, tex_source: str) -> str:
"""Process the TeX source after it is read, but before metadata
is extracted.
Parameters
----------
tex_source
TeX source content.
Returns
-------
tex_source
Normalized TeX source content.
"""
macros = get_macros(tex_source)
return replace_macros(tex_source, macros)
@abstractmethod
def extract_metadata(self, tex_source: str) -> DocumentMetadata:
"""Hook for implementing metadata extraction.
Parameters
----------
tex_source
TeX source content.
Returns
-------
metadata
The metadata parsed from the document source.
"""
raise NotImplementedError
|
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
from lander.ext.parser._datamodel import DocumentMetadata
+ from lander.ext.parser.texutils.extract import get_macros
- from lander.ext.parser.texutils.normalize import read_tex_file
+ from lander.ext.parser.texutils.normalize import read_tex_file, replace_macros
? ++++++++++++++++
if TYPE_CHECKING:
from pathlib import Path
__all__ = ["Parser"]
class Parser(metaclass=ABCMeta):
"""Base class for TeX document metadata parsing extensions.
Parameters
----------
tex_path
Path to the root tex document.
"""
def __init__(self, tex_path: Path) -> None:
self._tex_path = tex_path
- self._tex_source = read_tex_file(self.tex_path)
+ self._tex_source = self.normalize_source(read_tex_file(self.tex_path))
? ++++++++++++++++++++++ +
self._metadata = self.extract_metadata(self.tex_source)
@property
def tex_path(self) -> Path:
""""Path to the root TeX source file."""
return self._tex_path
@property
def tex_source(self) -> str:
"""TeX source, which has been normalized."""
return self._tex_source
@property
def metadata(self) -> DocumentMetadata:
"""Metadata about the document."""
return self._metadata
+ def normalize_source(self, tex_source: str) -> str:
+ """Process the TeX source after it is read, but before metadata
+ is extracted.
+
+ Parameters
+ ----------
+ tex_source
+ TeX source content.
+
+ Returns
+ -------
+ tex_source
+ Normalized TeX source content.
+ """
+ macros = get_macros(tex_source)
+ return replace_macros(tex_source, macros)
+
@abstractmethod
def extract_metadata(self, tex_source: str) -> DocumentMetadata:
- """Hook for implementing metadata extraction."""
? ---
+ """Hook for implementing metadata extraction.
+
+ Parameters
+ ----------
+ tex_source
+ TeX source content.
+
+ Returns
+ -------
+ metadata
+ The metadata parsed from the document source.
+ """
raise NotImplementedError
|
5c70751806c69bded77821b87d728821e37152c8
|
web/server.py
|
web/server.py
|
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, jsonify, make_response
import json
from decorators import Monitor
app = Flask(__name__)
#app.wsgi_app = WSGIApplication(app.config['APPINSIGHTS_INSTRUMENTATION_KEY'], app.wsgi_app)
@app.route('/')
@Monitor.api()
def index():
return 'Newsalyzer'
@app.route('/get-sentiment', endpoint='get_sentiment')
@Monitor.api()
def get_sentiment():
from google.cloud import language
language_client = language.Client()
text = request.args['text']
document = language_client.document_from_text(text)
# Detects the sentiment of the text
sentiment = document.analyze_sentiment().sentiment
entity_response = document.analyze_entities()
response = {
'score' : sentiment.score,
'magnitude' : sentiment.magnitude,
'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment } for e in entity_response.entities]
}
return json.dumps(response), 200, {'ContentType':'application/json'}
if __name__=='__main__':
app.run()
|
import os
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, jsonify, make_response
import json
from google.cloud import language
from decorators import Monitor
from blob_storage import BlobStorage
app = Flask(__name__)
#app.wsgi_app = WSGIApplication(app.config['APPINSIGHTS_INSTRUMENTATION_KEY'], app.wsgi_app)
if os.environ.has_key('STORAGE_ACCOUNT_NAME'):
local_key_file = 'private/google-nlp-key.json'
blob_storage = BlobStorage(os.environ['STORAGE_ACCOUNT_NAME'], os.environ['STORAGE_ACCOUNT_KEY'])
blob_storage.download_file('private', 'google-nlp-key.json', local_key_file)
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = local_key_file
@app.route('/')
@Monitor.api()
def index():
return 'Newsalyzer'
@app.route('/get-sentiment', endpoint='get_sentiment')
@Monitor.api()
def get_sentiment():
language_client = language.Client(api_version='v1beta2')
document = language_client.document_from_text(request.args['text'])
# Detects the sentiment of the text
sentiment = document.analyze_sentiment().sentiment
entity_response = document.analyze_entity_sentiment()
response = {
'score' : sentiment.score,
'magnitude' : sentiment.magnitude,
'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment.score, 'magnitude': e.sentiment.magnitude } for e in entity_response.entities]
}
return json.dumps(response), 200, {'ContentType':'application/json'}
if __name__=='__main__':
app.run()
|
Fix bugs in sentiment analysis code so entity sentiment is returned
|
Fix bugs in sentiment analysis code so entity sentiment is returned
|
Python
|
mit
|
harigov/newsalyzer,harigov/newsalyzer,harigov/newsalyzer
|
+ import os
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, jsonify, make_response
import json
+ from google.cloud import language
+
from decorators import Monitor
+ from blob_storage import BlobStorage
app = Flask(__name__)
#app.wsgi_app = WSGIApplication(app.config['APPINSIGHTS_INSTRUMENTATION_KEY'], app.wsgi_app)
+
+ if os.environ.has_key('STORAGE_ACCOUNT_NAME'):
+ local_key_file = 'private/google-nlp-key.json'
+ blob_storage = BlobStorage(os.environ['STORAGE_ACCOUNT_NAME'], os.environ['STORAGE_ACCOUNT_KEY'])
+ blob_storage.download_file('private', 'google-nlp-key.json', local_key_file)
+ os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = local_key_file
@app.route('/')
@Monitor.api()
def index():
return 'Newsalyzer'
@app.route('/get-sentiment', endpoint='get_sentiment')
@Monitor.api()
def get_sentiment():
- from google.cloud import language
- language_client = language.Client()
+ language_client = language.Client(api_version='v1beta2')
- text = request.args['text']
- document = language_client.document_from_text(text)
+ document = language_client.document_from_text(request.args['text'])
# Detects the sentiment of the text
sentiment = document.analyze_sentiment().sentiment
- entity_response = document.analyze_entities()
+ entity_response = document.analyze_entity_sentiment()
response = {
'score' : sentiment.score,
'magnitude' : sentiment.magnitude,
- 'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment } for e in entity_response.entities]
+ 'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment.score, 'magnitude': e.sentiment.magnitude } for e in entity_response.entities]
}
+
return json.dumps(response), 200, {'ContentType':'application/json'}
if __name__=='__main__':
app.run()
|
Fix bugs in sentiment analysis code so entity sentiment is returned
|
## Code Before:
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, jsonify, make_response
import json
from decorators import Monitor
app = Flask(__name__)
#app.wsgi_app = WSGIApplication(app.config['APPINSIGHTS_INSTRUMENTATION_KEY'], app.wsgi_app)
@app.route('/')
@Monitor.api()
def index():
return 'Newsalyzer'
@app.route('/get-sentiment', endpoint='get_sentiment')
@Monitor.api()
def get_sentiment():
from google.cloud import language
language_client = language.Client()
text = request.args['text']
document = language_client.document_from_text(text)
# Detects the sentiment of the text
sentiment = document.analyze_sentiment().sentiment
entity_response = document.analyze_entities()
response = {
'score' : sentiment.score,
'magnitude' : sentiment.magnitude,
'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment } for e in entity_response.entities]
}
return json.dumps(response), 200, {'ContentType':'application/json'}
if __name__=='__main__':
app.run()
## Instruction:
Fix bugs in sentiment analysis code so entity sentiment is returned
## Code After:
import os
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, jsonify, make_response
import json
from google.cloud import language
from decorators import Monitor
from blob_storage import BlobStorage
app = Flask(__name__)
#app.wsgi_app = WSGIApplication(app.config['APPINSIGHTS_INSTRUMENTATION_KEY'], app.wsgi_app)
if os.environ.has_key('STORAGE_ACCOUNT_NAME'):
local_key_file = 'private/google-nlp-key.json'
blob_storage = BlobStorage(os.environ['STORAGE_ACCOUNT_NAME'], os.environ['STORAGE_ACCOUNT_KEY'])
blob_storage.download_file('private', 'google-nlp-key.json', local_key_file)
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = local_key_file
@app.route('/')
@Monitor.api()
def index():
return 'Newsalyzer'
@app.route('/get-sentiment', endpoint='get_sentiment')
@Monitor.api()
def get_sentiment():
language_client = language.Client(api_version='v1beta2')
document = language_client.document_from_text(request.args['text'])
# Detects the sentiment of the text
sentiment = document.analyze_sentiment().sentiment
entity_response = document.analyze_entity_sentiment()
response = {
'score' : sentiment.score,
'magnitude' : sentiment.magnitude,
'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment.score, 'magnitude': e.sentiment.magnitude } for e in entity_response.entities]
}
return json.dumps(response), 200, {'ContentType':'application/json'}
if __name__=='__main__':
app.run()
|
+ import os
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, jsonify, make_response
import json
+ from google.cloud import language
+
from decorators import Monitor
+ from blob_storage import BlobStorage
app = Flask(__name__)
#app.wsgi_app = WSGIApplication(app.config['APPINSIGHTS_INSTRUMENTATION_KEY'], app.wsgi_app)
+
+ if os.environ.has_key('STORAGE_ACCOUNT_NAME'):
+ local_key_file = 'private/google-nlp-key.json'
+ blob_storage = BlobStorage(os.environ['STORAGE_ACCOUNT_NAME'], os.environ['STORAGE_ACCOUNT_KEY'])
+ blob_storage.download_file('private', 'google-nlp-key.json', local_key_file)
+ os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = local_key_file
@app.route('/')
@Monitor.api()
def index():
return 'Newsalyzer'
@app.route('/get-sentiment', endpoint='get_sentiment')
@Monitor.api()
def get_sentiment():
- from google.cloud import language
- language_client = language.Client()
+ language_client = language.Client(api_version='v1beta2')
? +++++++++++++++++++++
- text = request.args['text']
- document = language_client.document_from_text(text)
+ document = language_client.document_from_text(request.args['text'])
? ++++++++++++++ ++
# Detects the sentiment of the text
sentiment = document.analyze_sentiment().sentiment
- entity_response = document.analyze_entities()
? ^
+ entity_response = document.analyze_entity_sentiment()
? ++++++ + ^^
response = {
'score' : sentiment.score,
'magnitude' : sentiment.magnitude,
- 'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment } for e in entity_response.entities]
+ 'entities' : [ { 'name': e.name, 'type': e.entity_type, 'sentiment' : e.sentiment.score, 'magnitude': e.sentiment.magnitude } for e in entity_response.entities]
? ++++++++++++++++++++++++++++++++++++++++++
}
+
return json.dumps(response), 200, {'ContentType':'application/json'}
if __name__=='__main__':
app.run()
|
d28e884d832b63bef1434476a378de9b7e333264
|
samples/WavGenerator.py
|
samples/WavGenerator.py
|
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
|
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
|
Add a main function with command line arguments
|
Add a main function with command line arguments
Now able to generate wave files from command line
|
Python
|
mit
|
parrisha/raspi-visualizer
|
import wave
+ import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
-
+
- filename = 'Sample_'
+ filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
- filename += str(test_freq) + 'Hz@' + str(test_amp)
+ filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
+
+ if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
+ parser.add_argument('--freqs', nargs='+', type=int)
+ parser.add_argument('--amps', nargs='+', type=int)
+ args = parser.parse_args()
+
+ generate_sample_file(args.freqs, args.amps)
+
|
Add a main function with command line arguments
|
## Code Before:
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
## Instruction:
Add a main function with command line arguments
## Code After:
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
|
import wave
+ import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
-
+
- filename = 'Sample_'
? -
+ filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
- filename += str(test_freq) + 'Hz@' + str(test_amp)
+ filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
? ++++++
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
+
+ if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
+ parser.add_argument('--freqs', nargs='+', type=int)
+ parser.add_argument('--amps', nargs='+', type=int)
+ args = parser.parse_args()
+
+ generate_sample_file(args.freqs, args.amps)
+
|
2c7dc769874766b230bc11c7ec6f67d3c1157005
|
duplicatefiledir/__init__.py
|
duplicatefiledir/__init__.py
|
from fman import DirectoryPaneCommand, show_alert
import distutils
from distutils import dir_util, file_util
import os.path
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
if os.path.isdir(filedir):
#
# It is a directory. Process as a directory.
#
newDir = filedir + "-copy"
distutils.dir_util.copy_tree(filedir,newDir)
else:
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filedir)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
distutils.file_util.copy_file(filedir,nfilenm)
|
from fman import DirectoryPaneCommand, show_alert
from urllib.parse import urlparse
import os.path
from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
p = urlparse(filedir)
filepath = os.path.abspath(os.path.join(p.netloc, p.path))
if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
newDir = filepath + "-copy"
copytree(filepath, newDir)
else:
if os.path.isfile(filepath):
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filepath)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
copyfile(filepath, nfilenm)
else:
show_alert('Bad file path : {0}'.format(filepath))
|
Make it work with last fman version (0.7) on linux
|
Make it work with last fman version (0.7) on linux
|
Python
|
mit
|
raguay/DuplicateFileDir
|
from fman import DirectoryPaneCommand, show_alert
+ from urllib.parse import urlparse
- import distutils
- from distutils import dir_util, file_util
import os.path
+ from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
+ p = urlparse(filedir)
+ filepath = os.path.abspath(os.path.join(p.netloc, p.path))
- if os.path.isdir(filedir):
+ if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
- newDir = filedir + "-copy"
+ newDir = filepath + "-copy"
- distutils.dir_util.copy_tree(filedir,newDir)
+ copytree(filepath, newDir)
else:
+ if os.path.isfile(filepath):
- #
+ #
- # It is a file. Process as a file.
+ # It is a file. Process as a file.
- #
+ #
- dirPath, ofilenmc = os.path.split(filedir)
+ dirPath, ofilenmc = os.path.split(filepath)
- ofilenm, ext = os.path.splitext(ofilenmc)
+ ofilenm, ext = os.path.splitext(ofilenmc)
- nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
+ nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
- distutils.file_util.copy_file(filedir,nfilenm)
+ copyfile(filepath, nfilenm)
+ else:
+ show_alert('Bad file path : {0}'.format(filepath))
|
Make it work with last fman version (0.7) on linux
|
## Code Before:
from fman import DirectoryPaneCommand, show_alert
import distutils
from distutils import dir_util, file_util
import os.path
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
if os.path.isdir(filedir):
#
# It is a directory. Process as a directory.
#
newDir = filedir + "-copy"
distutils.dir_util.copy_tree(filedir,newDir)
else:
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filedir)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
distutils.file_util.copy_file(filedir,nfilenm)
## Instruction:
Make it work with last fman version (0.7) on linux
## Code After:
from fman import DirectoryPaneCommand, show_alert
from urllib.parse import urlparse
import os.path
from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
p = urlparse(filedir)
filepath = os.path.abspath(os.path.join(p.netloc, p.path))
if os.path.isdir(filepath):
#
# It is a directory. Process as a directory.
#
newDir = filepath + "-copy"
copytree(filepath, newDir)
else:
if os.path.isfile(filepath):
#
# It is a file. Process as a file.
#
dirPath, ofilenmc = os.path.split(filepath)
ofilenm, ext = os.path.splitext(ofilenmc)
nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
copyfile(filepath, nfilenm)
else:
show_alert('Bad file path : {0}'.format(filepath))
|
from fman import DirectoryPaneCommand, show_alert
+ from urllib.parse import urlparse
- import distutils
- from distutils import dir_util, file_util
import os.path
+ from shutil import copytree, copyfile
class DuplicateFileDir(DirectoryPaneCommand):
def __call__(self):
selected_files = self.pane.get_selected_files()
if len(selected_files) >= 1 or (len(selected_files) == 0 and self.get_chosen_files()):
if len(selected_files) == 0 and self.get_chosen_files():
selected_files.append(self.get_chosen_files()[0])
#
# Loop through each file/directory selected.
#
for filedir in selected_files:
+ p = urlparse(filedir)
+ filepath = os.path.abspath(os.path.join(p.netloc, p.path))
- if os.path.isdir(filedir):
? ^^^
+ if os.path.isdir(filepath):
? ^^^^
#
# It is a directory. Process as a directory.
#
- newDir = filedir + "-copy"
? ^^^
+ newDir = filepath + "-copy"
? ^^^^
- distutils.dir_util.copy_tree(filedir,newDir)
+ copytree(filepath, newDir)
else:
+ if os.path.isfile(filepath):
- #
+ #
? ++++
- # It is a file. Process as a file.
+ # It is a file. Process as a file.
? ++++
- #
+ #
? ++++
- dirPath, ofilenmc = os.path.split(filedir)
? ^^^
+ dirPath, ofilenmc = os.path.split(filepath)
? ++++ ^^^^
- ofilenm, ext = os.path.splitext(ofilenmc)
+ ofilenm, ext = os.path.splitext(ofilenmc)
? ++++
- nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
+ nfilenm = os.path.join(dirPath,ofilenm + "-copy" + ext)
? ++++
- distutils.file_util.copy_file(filedir,nfilenm)
+ copyfile(filepath, nfilenm)
+ else:
+ show_alert('Bad file path : {0}'.format(filepath))
|
939998db349c364aa0f5ba4705d4feb2da7104d5
|
nn/flags.py
|
nn/flags.py
|
import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
|
import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
|
Fix float type flag definition
|
Fix float type flag definition
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
- tf.app.flags.DEFINE_string("float32", "", "")
+ tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
|
Fix float type flag definition
|
## Code Before:
import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float32", "", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
## Instruction:
Fix float type flag definition
## Code After:
import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
tf.app.flags.DEFINE_string("float-type", "float32", "")
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
|
import functools
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("batch-size", 64, "")
tf.app.flags.DEFINE_float("dropout-prob", 0, "")
tf.app.flags.DEFINE_string("word-file", None, "")
tf.app.flags.DEFINE_integer("num-threads-per-queue", 2, "")
tf.app.flags.DEFINE_integer("queue-capacity", 2, "")
tf.app.flags.DEFINE_string("length-boundaries", "", "")
tf.app.flags.DEFINE_string("rnn-cell", "ln_lstm", "Default RNN cell")
- tf.app.flags.DEFINE_string("float32", "", "")
? ----
+ tf.app.flags.DEFINE_string("float-type", "float32", "")
? ++++++++++++++
@functools.lru_cache()
def words():
with open(tf.app.flags.FLAGS.word_file) as file_:
return sorted([line.strip() for line in file_.readlines()])
@functools.lru_cache()
def word_indices():
# 0 -> null, 1 -> unknown
return { word: index + 2 for index, word in enumerate(flags.words()) }
@functools.lru_cache()
def word_space_size():
return len(words())
def rnn_cell():
from .rnn import cell
return getattr(cell, FLAGS.rnn_cell)
def float_type():
return getattr(tf, FLAGS.float_type)
|
fb07eabac3847a1d454bbe6d663deef6ec47fc9b
|
seo/escaped_fragment/app.py
|
seo/escaped_fragment/app.py
|
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
Fix broken content rendered by PhJS
|
Fix broken content rendered by PhJS
|
Python
|
apache-2.0
|
platformio/platformio-web,orgkhnargh/platformio-web,orgkhnargh/platformio-web,platformio/platformio-web,orgkhnargh/platformio-web
|
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
- while retrynums < 3:
+ while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
+
+ if 'class="ng-scope"' not in response:
+ raise CalledProcessError()
+
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
Fix broken content rendered by PhJS
|
## Code Before:
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
## Instruction:
Fix broken content rendered by PhJS
## Code After:
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
- while retrynums < 3:
? ^
+ while retrynums < 5:
? ^
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
+
+ if 'class="ng-scope"' not in response:
+ raise CalledProcessError()
+
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
63af2d4267f7107232777fa0d8b222dc00f07a90
|
test_setup.py
|
test_setup.py
|
"""Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
|
"""Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
|
Check sysconfig 'scripts' instead of scanning PATH
|
Check sysconfig 'scripts' instead of scanning PATH
|
Python
|
lgpl-2.1
|
dmtucker/backlog
|
"""Test setup.py."""
import os
import subprocess
import sys
+ import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
- assert all(
- any(
- os.path.isfile(
+ assert os.path.isfile(
+ os.path.join(sysconfig.get_path('scripts'), 'backlog'),
- os.path.join(
- directory,
- console_script.partition('=')[0].strip(),
- ),
- )
- for directory in os.environ['PATH'].split(':')
- )
- for console_script in setup.ENTRY_POINTS['console_scripts']
)
|
Check sysconfig 'scripts' instead of scanning PATH
|
## Code Before:
"""Test setup.py."""
import os
import subprocess
import sys
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert all(
any(
os.path.isfile(
os.path.join(
directory,
console_script.partition('=')[0].strip(),
),
)
for directory in os.environ['PATH'].split(':')
)
for console_script in setup.ENTRY_POINTS['console_scripts']
)
## Instruction:
Check sysconfig 'scripts' instead of scanning PATH
## Code After:
"""Test setup.py."""
import os
import subprocess
import sys
import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
assert os.path.isfile(
os.path.join(sysconfig.get_path('scripts'), 'backlog'),
)
|
"""Test setup.py."""
import os
import subprocess
import sys
+ import sysconfig
import setup
def test_setup():
"""Run setup.py check."""
command = [
sys.executable,
setup.__file__,
'check',
'--metadata',
'--strict',
]
assert subprocess.run(command, check=False).returncode == 0
def test_console_scripts():
"""Ensure console scripts were installed correctly."""
- assert all(
- any(
- os.path.isfile(
? ^^^^^^^
+ assert os.path.isfile(
? ^^^^^^
+ os.path.join(sysconfig.get_path('scripts'), 'backlog'),
- os.path.join(
- directory,
- console_script.partition('=')[0].strip(),
- ),
- )
- for directory in os.environ['PATH'].split(':')
- )
- for console_script in setup.ENTRY_POINTS['console_scripts']
)
|
528759e6ba579de185616190e3e514938989a54e
|
tests/console/asciimatics/widgets/testcheckbox.py
|
tests/console/asciimatics/widgets/testcheckbox.py
|
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
Check if checkbox value has updated.
|
Check if checkbox value has updated.
|
Python
|
apache-2.0
|
LowieHuyghe/script-core
|
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
+ self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
Check if checkbox value has updated.
|
## Code Before:
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
## Instruction:
Check if checkbox value has updated.
## Code After:
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
from scriptcore.testing.testcase import TestCase
from scriptcore.console.asciimatics.widgets.checkbox import CheckBox
from asciimatics.widgets import CheckBox as ACheckBox
class TestCheckBox(TestCase):
def test_checkbox(self):
"""
Test the checkbox
:return: void
"""
changed_checkbox = []
def change_handler(checkbox):
changed_checkbox.append(checkbox)
checkbox = CheckBox(self.rand_str(), on_change=change_handler)
self.assert_is_instance(checkbox, ACheckBox)
for value in [True, False, True]:
previous_count = len(changed_checkbox)
checkbox.value = value
+ self.assert_equal(value, checkbox.value)
self.assert_equal(previous_count + 1, len(changed_checkbox))
self.assert_equal(checkbox, changed_checkbox[-1])
|
7dd17cc10f7e0857ab3017177d6c4abeb115ff07
|
south/models.py
|
south/models.py
|
from django.db import models
from south.db import DEFAULT_DB_ALIAS
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
from django.db import models
from south.db import DEFAULT_DB_ALIAS
# If we detect Django 1.7 or higher, then exit
# Placed here so it's guaranteed to be imported on Django start
import django
if django.VERSION[0] > 1 or (django.VERSION[0] == 1 and django.VERSION[1] > 6):
raise RuntimeError("South does not support Django 1.7 or higher. Please use native Django migrations.")
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
Add explicit version check for Django 1.7 or above
|
Add explicit version check for Django 1.7 or above
|
Python
|
apache-2.0
|
smartfile/django-south,smartfile/django-south
|
from django.db import models
from south.db import DEFAULT_DB_ALIAS
+
+ # If we detect Django 1.7 or higher, then exit
+ # Placed here so it's guaranteed to be imported on Django start
+ import django
+ if django.VERSION[0] > 1 or (django.VERSION[0] == 1 and django.VERSION[1] > 6):
+ raise RuntimeError("South does not support Django 1.7 or higher. Please use native Django migrations.")
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
Add explicit version check for Django 1.7 or above
|
## Code Before:
from django.db import models
from south.db import DEFAULT_DB_ALIAS
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
## Instruction:
Add explicit version check for Django 1.7 or above
## Code After:
from django.db import models
from south.db import DEFAULT_DB_ALIAS
# If we detect Django 1.7 or higher, then exit
# Placed here so it's guaranteed to be imported on Django start
import django
if django.VERSION[0] > 1 or (django.VERSION[0] == 1 and django.VERSION[1] > 6):
raise RuntimeError("South does not support Django 1.7 or higher. Please use native Django migrations.")
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
from django.db import models
from south.db import DEFAULT_DB_ALIAS
+
+ # If we detect Django 1.7 or higher, then exit
+ # Placed here so it's guaranteed to be imported on Django start
+ import django
+ if django.VERSION[0] > 1 or (django.VERSION[0] == 1 and django.VERSION[1] > 6):
+ raise RuntimeError("South does not support Django 1.7 or higher. Please use native Django migrations.")
class MigrationHistory(models.Model):
app_name = models.CharField(max_length=255)
migration = models.CharField(max_length=255)
applied = models.DateTimeField(blank=True)
@classmethod
def for_migration(cls, migration, database):
try:
# Switch on multi-db-ness
if database != DEFAULT_DB_ALIAS:
# Django 1.2
objects = cls.objects.using(database)
else:
# Django <= 1.1
objects = cls.objects
return objects.get(
app_name=migration.app_label(),
migration=migration.name(),
)
except cls.DoesNotExist:
return cls(
app_name=migration.app_label(),
migration=migration.name(),
)
def get_migrations(self):
from south.migration.base import Migrations
return Migrations(self.app_name)
def get_migration(self):
return self.get_migrations().migration(self.migration)
def __str__(self):
return "<%s: %s>" % (self.app_name, self.migration)
|
75eb6f93fff381953788a98aac8ee61bbf36c900
|
apps/storybase/templatetags/verbatim.py
|
apps/storybase/templatetags/verbatim.py
|
from django import template
# For Django >= 1.5, use the default verbatim tag implementation
if not hasattr(template.defaulttags, 'verbatim'):
register = template.Library()
class VerbatimNode(template.Node):
def __init__(self, text):
self.text = text
def render(self, context):
return self.text
@register.tag
def verbatim(parser, token):
text = []
while 1:
token = parser.tokens.pop(0)
if token.contents == 'endverbatim':
break
if token.token_type == template.TOKEN_VAR:
text.append('{{')
elif token.token_type == template.TOKEN_BLOCK:
text.append('{%')
text.append(token.contents)
if token.token_type == template.TOKEN_VAR:
text.append('}}')
elif token.token_type == template.TOKEN_BLOCK:
text.append('%}')
return VerbatimNode(''.join(text))
|
from django import template
register = template.Library()
# For Django >= 1.5, use the default verbatim tag implementation
if not hasattr(template.defaulttags, 'verbatim'):
class VerbatimNode(template.Node):
def __init__(self, text):
self.text = text
def render(self, context):
return self.text
@register.tag
def verbatim(parser, token):
text = []
while 1:
token = parser.tokens.pop(0)
if token.contents == 'endverbatim':
break
if token.token_type == template.TOKEN_VAR:
text.append('{{')
elif token.token_type == template.TOKEN_BLOCK:
text.append('{%')
text.append(token.contents)
if token.token_type == template.TOKEN_VAR:
text.append('}}')
elif token.token_type == template.TOKEN_BLOCK:
text.append('%}')
return VerbatimNode(''.join(text))
|
Move register outside of guard
|
Move register outside of guard
Even if we don't load the ``verbatim`` tag backbport, the module
still needs to have a ``register`` variable.
Addresses #660
|
Python
|
mit
|
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
|
from django import template
+ register = template.Library()
+
# For Django >= 1.5, use the default verbatim tag implementation
if not hasattr(template.defaulttags, 'verbatim'):
- register = template.Library()
-
class VerbatimNode(template.Node):
def __init__(self, text):
self.text = text
def render(self, context):
return self.text
@register.tag
def verbatim(parser, token):
text = []
while 1:
token = parser.tokens.pop(0)
if token.contents == 'endverbatim':
break
if token.token_type == template.TOKEN_VAR:
text.append('{{')
elif token.token_type == template.TOKEN_BLOCK:
text.append('{%')
text.append(token.contents)
if token.token_type == template.TOKEN_VAR:
text.append('}}')
elif token.token_type == template.TOKEN_BLOCK:
text.append('%}')
return VerbatimNode(''.join(text))
|
Move register outside of guard
|
## Code Before:
from django import template
# For Django >= 1.5, use the default verbatim tag implementation
if not hasattr(template.defaulttags, 'verbatim'):
register = template.Library()
class VerbatimNode(template.Node):
def __init__(self, text):
self.text = text
def render(self, context):
return self.text
@register.tag
def verbatim(parser, token):
text = []
while 1:
token = parser.tokens.pop(0)
if token.contents == 'endverbatim':
break
if token.token_type == template.TOKEN_VAR:
text.append('{{')
elif token.token_type == template.TOKEN_BLOCK:
text.append('{%')
text.append(token.contents)
if token.token_type == template.TOKEN_VAR:
text.append('}}')
elif token.token_type == template.TOKEN_BLOCK:
text.append('%}')
return VerbatimNode(''.join(text))
## Instruction:
Move register outside of guard
## Code After:
from django import template
register = template.Library()
# For Django >= 1.5, use the default verbatim tag implementation
if not hasattr(template.defaulttags, 'verbatim'):
class VerbatimNode(template.Node):
def __init__(self, text):
self.text = text
def render(self, context):
return self.text
@register.tag
def verbatim(parser, token):
text = []
while 1:
token = parser.tokens.pop(0)
if token.contents == 'endverbatim':
break
if token.token_type == template.TOKEN_VAR:
text.append('{{')
elif token.token_type == template.TOKEN_BLOCK:
text.append('{%')
text.append(token.contents)
if token.token_type == template.TOKEN_VAR:
text.append('}}')
elif token.token_type == template.TOKEN_BLOCK:
text.append('%}')
return VerbatimNode(''.join(text))
|
from django import template
+ register = template.Library()
+
# For Django >= 1.5, use the default verbatim tag implementation
if not hasattr(template.defaulttags, 'verbatim'):
- register = template.Library()
-
class VerbatimNode(template.Node):
def __init__(self, text):
self.text = text
def render(self, context):
return self.text
@register.tag
def verbatim(parser, token):
text = []
while 1:
token = parser.tokens.pop(0)
if token.contents == 'endverbatim':
break
if token.token_type == template.TOKEN_VAR:
text.append('{{')
elif token.token_type == template.TOKEN_BLOCK:
text.append('{%')
text.append(token.contents)
if token.token_type == template.TOKEN_VAR:
text.append('}}')
elif token.token_type == template.TOKEN_BLOCK:
text.append('%}')
return VerbatimNode(''.join(text))
|
7c6a128b707db738d0a89c2897b35ed7d783ade0
|
plugins/basic_info_plugin.py
|
plugins/basic_info_plugin.py
|
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
name = 'BasicInfoPlugin'
short_description = 'Basic info:'
default = True
description = textwrap.dedent('''
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes
''')
def handle(self):
result = ''
for s in self.args['STRING']:
if len(self.args['STRING']) > 1:
result += '{0}:\n'.format(s)
table = VeryPrettyTable()
table.field_names = ['Length', '# Digits', '# Alpha', '# unprintable']
table.add_row((len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
sum(x in string.printable for x in s)))
result += str(table) + '\n'
return result
|
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
name = 'BasicInfoPlugin'
short_description = 'Basic info:'
default = True
description = textwrap.dedent('''
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes
''')
def handle(self):
result = ''
for s in self.args['STRING']:
if len(self.args['STRING']) > 1:
result += '{0}:\n'.format(s)
table = VeryPrettyTable()
table.field_names = ['Length', '# Digits', '# Alpha', '# Punct.', '# Control']
table.add_row((len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
result += str(table) + '\n'
return result
|
Add punctuation to basic info
|
Add punctuation to basic info
|
Python
|
mit
|
Sakartu/stringinfo
|
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
name = 'BasicInfoPlugin'
short_description = 'Basic info:'
default = True
description = textwrap.dedent('''
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes
''')
def handle(self):
result = ''
for s in self.args['STRING']:
if len(self.args['STRING']) > 1:
result += '{0}:\n'.format(s)
table = VeryPrettyTable()
- table.field_names = ['Length', '# Digits', '# Alpha', '# unprintable']
+ table.field_names = ['Length', '# Digits', '# Alpha', '# Punct.', '# Control']
table.add_row((len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
- sum(x in string.printable for x in s)))
+ sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
result += str(table) + '\n'
return result
|
Add punctuation to basic info
|
## Code Before:
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
name = 'BasicInfoPlugin'
short_description = 'Basic info:'
default = True
description = textwrap.dedent('''
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes
''')
def handle(self):
result = ''
for s in self.args['STRING']:
if len(self.args['STRING']) > 1:
result += '{0}:\n'.format(s)
table = VeryPrettyTable()
table.field_names = ['Length', '# Digits', '# Alpha', '# unprintable']
table.add_row((len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
sum(x in string.printable for x in s)))
result += str(table) + '\n'
return result
## Instruction:
Add punctuation to basic info
## Code After:
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
name = 'BasicInfoPlugin'
short_description = 'Basic info:'
default = True
description = textwrap.dedent('''
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes
''')
def handle(self):
result = ''
for s in self.args['STRING']:
if len(self.args['STRING']) > 1:
result += '{0}:\n'.format(s)
table = VeryPrettyTable()
table.field_names = ['Length', '# Digits', '# Alpha', '# Punct.', '# Control']
table.add_row((len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
result += str(table) + '\n'
return result
|
import string
import textwrap
from veryprettytable import VeryPrettyTable
from plugins import BasePlugin
__author__ = 'peter'
class BasicInfoPlugin(BasePlugin):
name = 'BasicInfoPlugin'
short_description = 'Basic info:'
default = True
description = textwrap.dedent('''
This plugin provides some basic info about the string such as:
- Length
- Presence of alpha/digits/raw bytes
''')
def handle(self):
result = ''
for s in self.args['STRING']:
if len(self.args['STRING']) > 1:
result += '{0}:\n'.format(s)
table = VeryPrettyTable()
- table.field_names = ['Length', '# Digits', '# Alpha', '# unprintable']
? ^^^ ^^ -
+ table.field_names = ['Length', '# Digits', '# Alpha', '# Punct.', '# Control']
? + ^^^^^^^^^^^ ^^
table.add_row((len(s), sum(x.isdigit() for x in s), sum(x.isalpha() for x in s),
- sum(x in string.printable for x in s)))
+ sum(x in string.punctuation for x in s), sum(x not in string.printable for x in s)))
result += str(table) + '\n'
return result
|
ac0a166f96509c37ade42e9ae4c35f43137bbbbb
|
mygpoauth/login/urls.py
|
mygpoauth/login/urls.py
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
path('', auth_views.login, {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
path('', auth_views.LoginView.as_view(), {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
Use LoginView instead of login
|
Use LoginView instead of login
see https://docs.djangoproject.com/en/dev/releases/1.11/#django-contrib-auth
|
Python
|
agpl-3.0
|
gpodder/mygpo-auth,gpodder/mygpo-auth
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
- path('', auth_views.login, {
+ path('', auth_views.LoginView.as_view(), {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
Use LoginView instead of login
|
## Code Before:
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
path('', auth_views.login, {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
## Instruction:
Use LoginView instead of login
## Code After:
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
path('', auth_views.LoginView.as_view(), {
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
from . import forms
app_name = 'login'
urlpatterns = [
- path('', auth_views.login, {
? ^
+ path('', auth_views.LoginView.as_view(), {
? ^ ++++++++++++++
'template_name': 'login/login.html',
'authentication_form': forms.MyAuthenticationForm,
},
name='login'),
]
|
cda63e96b042de04b3aa12348a411229e9b9d973
|
tools/glidein_cat.py
|
tools/glidein_cat.py
|
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
|
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
|
Change rel paths into abspaths and use helper module
|
Change rel paths into abspaths and use helper module
|
Python
|
bsd-3-clause
|
bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS
|
+ import sys,os.path
+ sys.path.append(os.path.join(sys.path[0],"lib"))
+ sys.path.append(os.path.join(sys.path[0],"../lib"))
- import os
- import string
- import stat
- import sys
- sys.path.append("lib")
- sys.path.append("../lib")
- import glideinMonitor
+ import glideinCmd
+ glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
- def createCatMonitorFile(monitor_file_name,monitor_control_relname,
- argv,condor_status):
- fd=open(monitor_file_name,"w")
- try:
- fd.write("#!/bin/sh\n")
- fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
- fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
- fd.write("echo Done > %s\n"%monitor_control_relname)
- finally:
- fd.close()
- os.chmod(monitor_file_name,stat.S_IRWXU)
-
-
- args=glideinMonitor.parseArgs(sys.argv[1:])
-
- glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
- args['timeout'],
- createCatMonitorFile,args['argv'])
-
|
Change rel paths into abspaths and use helper module
|
## Code Before:
import os
import string
import stat
import sys
sys.path.append("lib")
sys.path.append("../lib")
import glideinMonitor
def createCatMonitorFile(monitor_file_name,monitor_control_relname,
argv,condor_status):
fd=open(monitor_file_name,"w")
try:
fd.write("#!/bin/sh\n")
fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
fd.write("echo Done > %s\n"%monitor_control_relname)
finally:
fd.close()
os.chmod(monitor_file_name,stat.S_IRWXU)
args=glideinMonitor.parseArgs(sys.argv[1:])
glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
args['timeout'],
createCatMonitorFile,args['argv'])
## Instruction:
Change rel paths into abspaths and use helper module
## Code After:
import sys,os.path
sys.path.append(os.path.join(sys.path[0],"lib"))
sys.path.append(os.path.join(sys.path[0],"../lib"))
import glideinCmd
glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
|
+ import sys,os.path
+ sys.path.append(os.path.join(sys.path[0],"lib"))
+ sys.path.append(os.path.join(sys.path[0],"../lib"))
- import os
- import string
- import stat
- import sys
- sys.path.append("lib")
- sys.path.append("../lib")
- import glideinMonitor
+ import glideinCmd
+ glideinCmd.exe_cmd(lambda argv:(['cat']+argv))
- def createCatMonitorFile(monitor_file_name,monitor_control_relname,
- argv,condor_status):
- fd=open(monitor_file_name,"w")
- try:
- fd.write("#!/bin/sh\n")
- fd.write("outdir=`ls -lt .. | tail -1 | awk '{print $9}'`\n")
- fd.write("(cd ../$outdir; if [ $? -eq 0 ]; then cat %s; else echo Internal error; fi)\n"%(string.join(argv)))
- fd.write("echo Done > %s\n"%monitor_control_relname)
- finally:
- fd.close()
-
- os.chmod(monitor_file_name,stat.S_IRWXU)
-
-
- args=glideinMonitor.parseArgs(sys.argv[1:])
-
- glideinMonitor.monitor(args['jid'],args['schedd_name'],args['pool_name'],
- args['timeout'],
- createCatMonitorFile,args['argv'])
|
5761364149b3171521cb4f72f591dc5f5cbd77d6
|
temp-sensor02/main.py
|
temp-sensor02/main.py
|
from machine import Pin
from ds18x20 import DS18X20
import onewire
import time
import machine
import ujson
import urequests
def posttocloud(temperature):
keystext = open("sparkfun_keys.json").read()
keys = ujson.loads(keystext)
url = keys['inputUrl'] + "?private_key=" + keys['privateKey'] + "&temp=" + str(temperature)
#data = {'temp':temperature}
#data['private_key'] = keys['privateKey']
#print (keys['inputUrl'])
#print(keys['privateKey'])
#datajson = ujson.dumps(data)
#print (datajson)
resp = urequests.request("POST", url)
print (resp.text)
while True:
p = Pin(2) # Data Line is on GPIO2 aka D4
ow = onewire.OneWire(p)
ds = DS18X20(ow)
lstrom = ds.scan()
#Assuming we have only 1 device connected
rom = lstrom[0]
ds.convert_temp()
time.sleep_ms(750)
temperature = round(float(ds.read_temp(rom)),1)
#print("Temperature: {:02.1f}".format(temperature))
posttocloud(temperature)
time.sleep(10)
|
from machine import Pin
from ds18x20 import DS18X20
import onewire
import time
import ujson
import urequests
def posttocloud(temperature):
keystext = open("sparkfun_keys.json").read()
keys = ujson.loads(keystext)
params = {}
params['temp'] = temperature
params['private_key'] = keys['privateKey']
#data.sparkfun doesn't support putting data into the POST Body.
#We had to add the data to the query string
#Copied the Dirty hack from
#https://github.com/matze/python-phant/blob/24edb12a449b87700a4f736e43a5415b1d021823/phant/__init__.py
payload_str = "&".join("%s=%s" % (k, v) for k, v in params.items())
url = keys['inputUrl'] + "?" + payload_str
resp = urequests.request("POST", url)
print (resp.text)
while True:
p = Pin(2) # Data Line is on GPIO2 aka D4
ow = onewire.OneWire(p)
ds = DS18X20(ow)
lstrom = ds.scan()
#Assuming we have only 1 device connected
rom = lstrom[0]
ds.convert_temp()
time.sleep_ms(750)
temperature = round(float(ds.read_temp(rom)),1)
#print("Temperature: {:02.1f}".format(temperature))
posttocloud(temperature)
time.sleep(10)
|
Build a query string with params in a dictionary and append it to the URL. Makes the code readale. Remove commented code
|
Build a query string with params in a dictionary and append it to the URL. Makes the code readale. Remove commented code
|
Python
|
mit
|
fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout,fuzzyhandle/esp8266hangout
|
from machine import Pin
from ds18x20 import DS18X20
import onewire
import time
- import machine
import ujson
import urequests
def posttocloud(temperature):
+
keystext = open("sparkfun_keys.json").read()
keys = ujson.loads(keystext)
- url = keys['inputUrl'] + "?private_key=" + keys['privateKey'] + "&temp=" + str(temperature)
- #data = {'temp':temperature}
+ params = {}
+ params['temp'] = temperature
- #data['private_key'] = keys['privateKey']
+ params['private_key'] = keys['privateKey']
- #print (keys['inputUrl'])
- #print(keys['privateKey'])
- #datajson = ujson.dumps(data)
- #print (datajson)
+
+ #data.sparkfun doesn't support putting data into the POST Body.
+ #We had to add the data to the query string
+ #Copied the Dirty hack from
+ #https://github.com/matze/python-phant/blob/24edb12a449b87700a4f736e43a5415b1d021823/phant/__init__.py
+ payload_str = "&".join("%s=%s" % (k, v) for k, v in params.items())
+ url = keys['inputUrl'] + "?" + payload_str
resp = urequests.request("POST", url)
print (resp.text)
while True:
p = Pin(2) # Data Line is on GPIO2 aka D4
ow = onewire.OneWire(p)
ds = DS18X20(ow)
lstrom = ds.scan()
#Assuming we have only 1 device connected
rom = lstrom[0]
ds.convert_temp()
time.sleep_ms(750)
temperature = round(float(ds.read_temp(rom)),1)
#print("Temperature: {:02.1f}".format(temperature))
posttocloud(temperature)
time.sleep(10)
|
Build a query string with params in a dictionary and append it to the URL. Makes the code readale. Remove commented code
|
## Code Before:
from machine import Pin
from ds18x20 import DS18X20
import onewire
import time
import machine
import ujson
import urequests
def posttocloud(temperature):
keystext = open("sparkfun_keys.json").read()
keys = ujson.loads(keystext)
url = keys['inputUrl'] + "?private_key=" + keys['privateKey'] + "&temp=" + str(temperature)
#data = {'temp':temperature}
#data['private_key'] = keys['privateKey']
#print (keys['inputUrl'])
#print(keys['privateKey'])
#datajson = ujson.dumps(data)
#print (datajson)
resp = urequests.request("POST", url)
print (resp.text)
while True:
p = Pin(2) # Data Line is on GPIO2 aka D4
ow = onewire.OneWire(p)
ds = DS18X20(ow)
lstrom = ds.scan()
#Assuming we have only 1 device connected
rom = lstrom[0]
ds.convert_temp()
time.sleep_ms(750)
temperature = round(float(ds.read_temp(rom)),1)
#print("Temperature: {:02.1f}".format(temperature))
posttocloud(temperature)
time.sleep(10)
## Instruction:
Build a query string with params in a dictionary and append it to the URL. Makes the code readale. Remove commented code
## Code After:
from machine import Pin
from ds18x20 import DS18X20
import onewire
import time
import ujson
import urequests
def posttocloud(temperature):
keystext = open("sparkfun_keys.json").read()
keys = ujson.loads(keystext)
params = {}
params['temp'] = temperature
params['private_key'] = keys['privateKey']
#data.sparkfun doesn't support putting data into the POST Body.
#We had to add the data to the query string
#Copied the Dirty hack from
#https://github.com/matze/python-phant/blob/24edb12a449b87700a4f736e43a5415b1d021823/phant/__init__.py
payload_str = "&".join("%s=%s" % (k, v) for k, v in params.items())
url = keys['inputUrl'] + "?" + payload_str
resp = urequests.request("POST", url)
print (resp.text)
while True:
p = Pin(2) # Data Line is on GPIO2 aka D4
ow = onewire.OneWire(p)
ds = DS18X20(ow)
lstrom = ds.scan()
#Assuming we have only 1 device connected
rom = lstrom[0]
ds.convert_temp()
time.sleep_ms(750)
temperature = round(float(ds.read_temp(rom)),1)
#print("Temperature: {:02.1f}".format(temperature))
posttocloud(temperature)
time.sleep(10)
|
from machine import Pin
from ds18x20 import DS18X20
import onewire
import time
- import machine
import ujson
import urequests
def posttocloud(temperature):
+
keystext = open("sparkfun_keys.json").read()
keys = ujson.loads(keystext)
- url = keys['inputUrl'] + "?private_key=" + keys['privateKey'] + "&temp=" + str(temperature)
- #data = {'temp':temperature}
+ params = {}
+ params['temp'] = temperature
- #data['private_key'] = keys['privateKey']
? ^^ ^
+ params['private_key'] = keys['privateKey']
? ^ ^ ++
- #print (keys['inputUrl'])
- #print(keys['privateKey'])
- #datajson = ujson.dumps(data)
- #print (datajson)
+
+ #data.sparkfun doesn't support putting data into the POST Body.
+ #We had to add the data to the query string
+ #Copied the Dirty hack from
+ #https://github.com/matze/python-phant/blob/24edb12a449b87700a4f736e43a5415b1d021823/phant/__init__.py
+ payload_str = "&".join("%s=%s" % (k, v) for k, v in params.items())
+ url = keys['inputUrl'] + "?" + payload_str
resp = urequests.request("POST", url)
print (resp.text)
while True:
p = Pin(2) # Data Line is on GPIO2 aka D4
ow = onewire.OneWire(p)
ds = DS18X20(ow)
lstrom = ds.scan()
#Assuming we have only 1 device connected
rom = lstrom[0]
ds.convert_temp()
time.sleep_ms(750)
temperature = round(float(ds.read_temp(rom)),1)
#print("Temperature: {:02.1f}".format(temperature))
posttocloud(temperature)
time.sleep(10)
|
9921b6bd73c5256a3b65c2a5106717ce0fc8f0cf
|
djangorestframework/utils/breadcrumbs.py
|
djangorestframework/utils/breadcrumbs.py
|
from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
|
from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
    """
    Given a url returns a list of breadcrumbs, which are each a tuple of
    (name, url), ordered from the root of the URL hierarchy down to ``url``.

    Handles deployments where the application is not mounted at the root by
    stripping the script prefix before resolving and re-attaching it to the
    emitted breadcrumb links.
    """
    # Imported locally, presumably to avoid a circular import between this
    # utility module and the views module -- TODO confirm.
    from djangorestframework.views import APIView
    def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
        """Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
        try:
            (view, unused_args, unused_kwargs) = resolve(url)
        except Exception:
            # This URL prefix doesn't resolve to any view; keep chomping.
            pass
        else:
            # Check if this is a REST framework view, and if so add it to the breadcrumbs
            if isinstance(getattr(view, 'cls_instance', None), APIView):
                # Re-attach the script prefix so the emitted link is complete.
                breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
        if url == '':
            # All done
            return breadcrumbs_list
        elif url.endswith('/'):
            # Drop trailing slash off the end and continue to try to resolve more breadcrumbs
            return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
        # Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
        return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
    # resolve() expects URLs without the script prefix (the path under which
    # the application is mounted), so strip it here and re-add it above.
    prefix = get_script_prefix()
    url = url[len(prefix):]
    return breadcrumbs_recursive(url, [], prefix)
|
Use get_script_prefix to play nicely if not installed at the root.
|
Use get_script_prefix to play nicely if not installed at the root.
|
Python
|
bsd-2-clause
|
rafaelcaricio/django-rest-framework,maryokhin/django-rest-framework,jtiai/django-rest-framework,cheif/django-rest-framework,vstoykov/django-rest-framework,wwj718/django-rest-framework,ebsaral/django-rest-framework,jpadilla/django-rest-framework,damycra/django-rest-framework,kezabelle/django-rest-framework,cyberj/django-rest-framework,hnarayanan/django-rest-framework,kgeorgy/django-rest-framework,antonyc/django-rest-framework,lubomir/django-rest-framework,ambivalentno/django-rest-framework,potpath/django-rest-framework,ashishfinoit/django-rest-framework,waytai/django-rest-framework,nhorelik/django-rest-framework,iheitlager/django-rest-framework,rubendura/django-rest-framework,aericson/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,atombrella/django-rest-framework,krinart/django-rest-framework,HireAnEsquire/django-rest-framework,gregmuellegger/django-rest-framework,thedrow/django-rest-framework-1,mgaitan/django-rest-framework,hnakamur/django-rest-framework,callorico/django-rest-framework,hnakamur/django-rest-framework,tigeraniya/django-rest-framework,douwevandermeij/django-rest-framework,dmwyatt/django-rest-framework,agconti/django-rest-framework,canassa/django-rest-framework,johnraz/django-rest-framework,linovia/django-rest-framework,wwj718/django-rest-framework,brandoncazander/django-rest-framework,canassa/django-rest-framework,ashishfinoit/django-rest-framework,agconti/django-rest-framework,krinart/django-rest-framework,xiaotangyuan/django-rest-framework,cyberj/django-rest-framework,uruz/django-rest-framework,akalipetis/django-rest-framework,tcroiset/django-rest-framework,buptlsl/django-rest-framework,d0ugal/django-rest-framework,kgeorgy/django-rest-framework,vstoykov/django-rest-framework,thedrow/django-rest-framework-1,jerryhebert/django-rest-framework,delinhabit/django-rest-framework,davesque/django-rest-framework,potpath/django-rest-framework,kennydude/django-rest-framework,qsorix/django-rest-framework,uploadcare/django-r
est-framework,ebsaral/django-rest-framework,jness/django-rest-framework,MJafarMashhadi/django-rest-framework,tcroiset/django-rest-framework,adambain-vokal/django-rest-framework,johnraz/django-rest-framework,mgaitan/django-rest-framework,wedaly/django-rest-framework,sheppard/django-rest-framework,uploadcare/django-rest-framework,rhblind/django-rest-framework,sehmaschine/django-rest-framework,sbellem/django-rest-framework,arpheno/django-rest-framework,rafaelang/django-rest-framework,bluedazzle/django-rest-framework,jtiai/django-rest-framework,antonyc/django-rest-framework,ajaali/django-rest-framework,rhblind/django-rest-framework,ebsaral/django-rest-framework,akalipetis/django-rest-framework,buptlsl/django-rest-framework,potpath/django-rest-framework,xiaotangyuan/django-rest-framework,jerryhebert/django-rest-framework,douwevandermeij/django-rest-framework,qsorix/django-rest-framework,callorico/django-rest-framework,sbellem/django-rest-framework,sehmaschine/django-rest-framework,elim/django-rest-framework,akalipetis/django-rest-framework,kylefox/django-rest-framework,adambain-vokal/django-rest-framework,maryokhin/django-rest-framework,fishky/django-rest-framework,werthen/django-rest-framework,simudream/django-rest-framework,delinhabit/django-rest-framework,aericson/django-rest-framework,abdulhaq-e/django-rest-framework,simudream/django-rest-framework,paolopaolopaolo/django-rest-framework,jpulec/django-rest-framework,James1345/django-rest-framework,fishky/django-rest-framework,ajaali/django-rest-framework,ashishfinoit/django-rest-framework,alacritythief/django-rest-framework,ticosax/django-rest-framework,cheif/django-rest-framework,wedaly/django-rest-framework,callorico/django-rest-framework,YBJAY00000/django-rest-framework,mgaitan/django-rest-framework,YBJAY00000/django-rest-framework,wzbozon/django-rest-framework,bluedazzle/django-rest-framework,elim/django-rest-framework,kylefox/django-rest-framework,alacritythief/django-rest-framework,kennydude/django-rest-framework
,hunter007/django-rest-framework,abdulhaq-e/django-rest-framework,leeahoward/django-rest-framework,damycra/django-rest-framework,andriy-s/django-rest-framework,waytai/django-rest-framework,aericson/django-rest-framework,leeahoward/django-rest-framework,wzbozon/django-rest-framework,wangpanjun/django-rest-framework,tcroiset/django-rest-framework,bluedazzle/django-rest-framework,jness/django-rest-framework,tigeraniya/django-rest-framework,andriy-s/django-rest-framework,hnakamur/django-rest-framework,rhblind/django-rest-framework,nryoung/django-rest-framework,edx/django-rest-framework,AlexandreProenca/django-rest-framework,simudream/django-rest-framework,nhorelik/django-rest-framework,krinart/django-rest-framework,wangpanjun/django-rest-framework,davesque/django-rest-framework,ticosax/django-rest-framework,jpulec/django-rest-framework,hunter007/django-rest-framework,jpulec/django-rest-framework,maryokhin/django-rest-framework,iheitlager/django-rest-framework,rafaelcaricio/django-rest-framework,yiyocx/django-rest-framework,jpadilla/django-rest-framework,jerryhebert/django-rest-framework,ezheidtmann/django-rest-framework,waytai/django-rest-framework,tomchristie/django-rest-framework,gregmuellegger/django-rest-framework,wzbozon/django-rest-framework,James1345/django-rest-framework,werthen/django-rest-framework,atombrella/django-rest-framework,pombredanne/django-rest-framework,adambain-vokal/django-rest-framework,paolopaolopaolo/django-rest-framework,gregmuellegger/django-rest-framework,qsorix/django-rest-framework,raphaelmerx/django-rest-framework,dmwyatt/django-rest-framework,brandoncazander/django-rest-framework,xiaotangyuan/django-rest-framework,AlexandreProenca/django-rest-framework,werthen/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,jpadilla/django-rest-framework,ajaali/django-rest-framework,kylefox/django-rest-framework,ossanna16/django-rest-framework,justanr/django-rest-framework,pombredanne/django-rest-framework,YBJAY00000/
django-rest-framework,atombrella/django-rest-framework,kezabelle/django-rest-framework,James1345/django-rest-framework,MJafarMashhadi/django-rest-framework,iheitlager/django-rest-framework,wangpanjun/django-rest-framework,ticosax/django-rest-framework,edx/django-rest-framework,d0ugal/django-rest-framework,rubendura/django-rest-framework,HireAnEsquire/django-rest-framework,cheif/django-rest-framework,nryoung/django-rest-framework,AlexandreProenca/django-rest-framework,brandoncazander/django-rest-framework,arpheno/django-rest-framework,MJafarMashhadi/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,nryoung/django-rest-framework,lubomir/django-rest-framework,ossanna16/django-rest-framework,thedrow/django-rest-framework-1,justanr/django-rest-framework,buptlsl/django-rest-framework,lubomir/django-rest-framework,vstoykov/django-rest-framework,zeldalink0515/django-rest-framework,raphaelmerx/django-rest-framework,damycra/django-rest-framework,ambivalentno/django-rest-framework,nhorelik/django-rest-framework,VishvajitP/django-rest-framework,kezabelle/django-rest-framework,sheppard/django-rest-framework,leeahoward/django-rest-framework,dmwyatt/django-rest-framework,wedaly/django-rest-framework,justanr/django-rest-framework,uruz/django-rest-framework,delinhabit/django-rest-framework,tomchristie/django-rest-framework,VishvajitP/django-rest-framework,canassa/django-rest-framework,rafaelang/django-rest-framework,andriy-s/django-rest-framework,hunter007/django-rest-framework,paolopaolopaolo/django-rest-framework,hnarayanan/django-rest-framework,HireAnEsquire/django-rest-framework,abdulhaq-e/django-rest-framework,jness/django-rest-framework,douwevandermeij/django-rest-framework,pombredanne/django-rest-framework,ossanna16/django-rest-framework,linovia/django-rest-framework,cyberj/django-rest-framework,wwj718/django-rest-framework,d0ugal/django-rest-framework,sheppard/django-rest-framework,sehmaschine/django-rest-framework,tigeraniya/django-rest
-framework,linovia/django-rest-framework,zeldalink0515/django-rest-framework,alacritythief/django-rest-framework,uruz/django-rest-framework,VishvajitP/django-rest-framework,ambivalentno/django-rest-framework,fishky/django-rest-framework,tomchristie/django-rest-framework,sbellem/django-rest-framework,zeldalink0515/django-rest-framework,arpheno/django-rest-framework,agconti/django-rest-framework,davesque/django-rest-framework,elim/django-rest-framework,jtiai/django-rest-framework,yiyocx/django-rest-framework,yiyocx/django-rest-framework,edx/django-rest-framework,johnraz/django-rest-framework,antonyc/django-rest-framework,ezheidtmann/django-rest-framework,ezheidtmann/django-rest-framework,rafaelcaricio/django-rest-framework,rafaelang/django-rest-framework
|
- from django.core.urlresolvers import resolve
+ from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
- def breadcrumbs_recursive(url, breadcrumbs_list):
+ def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
- breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
+ breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
- return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
+ return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
- return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
+ return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
+ prefix = get_script_prefix()
+ url = url[len(prefix):]
- return breadcrumbs_recursive(url, [])
+ return breadcrumbs_recursive(url, [], prefix)
|
Use get_script_prefix to play nicely if not installed at the root.
|
## Code Before:
from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
## Instruction:
Use get_script_prefix to play nicely if not installed at the root.
## Code After:
from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
|
- from django.core.urlresolvers import resolve
+ from django.core.urlresolvers import resolve, get_script_prefix
? +++++++++++++++++++
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
- def breadcrumbs_recursive(url, breadcrumbs_list):
+ def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
? ++++++++
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
- breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
+ breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
? +++++++++
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
- return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
+ return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
? ++++++++
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
- return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
+ return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
? ++++++++
+ prefix = get_script_prefix()
+ url = url[len(prefix):]
- return breadcrumbs_recursive(url, [])
+ return breadcrumbs_recursive(url, [], prefix)
? ++++++++
|
962b674053ecf52730315550675c29fa8ba8ec12
|
openprovider/data/exception_map.py
|
openprovider/data/exception_map.py
|
from openprovider.exceptions import *
# Map of Openprovider numeric response codes to specific exception classes.
MAPPING = {
    307: BadRequest,  # Invalid domain extension
    501: BadRequest,  # Domain name too short
}


def from_code(code):
    """
    Return the specific exception class for the given code, or OpenproviderError
    if no specific exception class is available.
    """
    # Fall back to the generic error class for unmapped codes.
    return MAPPING.get(code, OpenproviderError)
|
from openprovider.exceptions import *
# Map of Openprovider numeric response codes to specific exception classes.
MAPPING = {
    307: BadRequest,          # Invalid domain extension
    501: BadRequest,          # Domain name too short
    4005: ServiceUnavailable, # Temporarily unavailable due to maintenance
}


def from_code(code):
    """
    Return the specific exception class for the given code, or OpenproviderError
    if no specific exception class is available.
    """
    # Fall back to the generic error class for unmapped codes.
    return MAPPING.get(code, OpenproviderError)
|
Add maintenance response to exception map
|
Add maintenance response to exception map
|
Python
|
mit
|
AntagonistHQ/openprovider.py
|
from openprovider.exceptions import *
MAPPING = {
- 307: BadRequest, # Invalid domain extension
+ 307: BadRequest, # Invalid domain extension
- 501: BadRequest, # Domain name too short
+ 501: BadRequest, # Domain name too short
+ 4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
Add maintenance response to exception map
|
## Code Before:
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
## Instruction:
Add maintenance response to exception map
## Code After:
from openprovider.exceptions import *
MAPPING = {
307: BadRequest, # Invalid domain extension
501: BadRequest, # Domain name too short
4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
from openprovider.exceptions import *
MAPPING = {
- 307: BadRequest, # Invalid domain extension
+ 307: BadRequest, # Invalid domain extension
? ++++++++
- 501: BadRequest, # Domain name too short
+ 501: BadRequest, # Domain name too short
? ++++++++
+ 4005: ServiceUnavailable, # Temprorarily unavailable due to maintenance
}
def from_code(code):
"""
Return the specific exception class for the given code, or OpenproviderError
if no specific exception class is available.
"""
if code in MAPPING:
return MAPPING[code]
else:
return OpenproviderError
|
4c95937d43f6ec769412b0cb8b58546ecb5617ec
|
setup.py
|
setup.py
|
from distutils.core import setup

# Trove classifiers describing maturity, audience, license and the
# supported Python versions.
PACKAGE_CLASSIFIERS = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Developers",
    "Natural Language :: English",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: Implementation :: CPython",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

# Distribution metadata for the ``jute`` package; sources live under
# ``python3/jute``.
setup(
    name='jute',
    version='0.1',
    description='Yet another interface module for Python',
    author='Jonathan Patrick Giddy',
    author_email='[email protected]',
    url='https://github.com/jongiddy/jute',
    download_url='https://github.com/jongiddy/jute/tarball/0.1',
    packages=['jute'],
    package_dir={'jute': 'python3/jute'},
    classifiers=PACKAGE_CLASSIFIERS,
)
|
from distutils.core import setup

# Trove classifiers describing maturity, audience, license and the
# supported Python versions.
PACKAGE_CLASSIFIERS = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Developers",
    "Natural Language :: English",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: Implementation :: CPython",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

# Distribution metadata for the ``jute`` package; sources live under
# ``python3/jute``.
setup(
    name='jute',
    version='0.1.0',
    description='An interface module that verifies both providers and callers',
    author='Jonathan Patrick Giddy',
    author_email='[email protected]',
    url='https://github.com/jongiddy/jute',
    download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
    packages=['jute'],
    package_dir={'jute': 'python3/jute'},
    classifiers=PACKAGE_CLASSIFIERS,
)
|
Change the tagline for PyPI
|
Change the tagline for PyPI
|
Python
|
mit
|
jongiddy/jute,jongiddy/jute
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
- version='0.1',
+ version='0.1.0',
- description='Yet another interface module for Python',
+ description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='[email protected]',
url='https://github.com/jongiddy/jute',
- download_url='https://github.com/jongiddy/jute/tarball/0.1',
+ download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
Change the tagline for PyPI
|
## Code Before:
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1',
description='Yet another interface module for Python',
author='Jonathan Patrick Giddy',
author_email='[email protected]',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/0.1',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
## Instruction:
Change the tagline for PyPI
## Code After:
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1.0',
description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='[email protected]',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
- version='0.1',
+ version='0.1.0',
? ++
- description='Yet another interface module for Python',
+ description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='[email protected]',
url='https://github.com/jongiddy/jute',
- download_url='https://github.com/jongiddy/jute/tarball/0.1',
+ download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
? + ++
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
9be2846e408699308798b698754634ce7f370710
|
openedx/stanford/cms/urls.py
|
openedx/stanford/cms/urls.py
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
# Stanford-specific Studio URL patterns; each route is scoped to a course
# via settings.COURSE_KEY_PATTERN.
urlpatterns = [
    url(
        r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.send_test_enrollment_email,
        name='send_test_enrollment_email',
    ),
    url(
        r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.utility_handler,
    ),
    url(
        r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.utility_captions_handler,
    ),
    url(
        r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.utility_bulksettings_handler,
    ),
]
# On Shibboleth-only deployments, also expose fallback signup/signin pages.
if settings.SHIB_ONLY_SITE:
    urlpatterns += [
        url(
            r'^backup_signup$',
            contentstore.views.signup,
            name='backup_signup',
        ),
        url(
            r'^backup_signin$',
            contentstore.views.login_page,
            name='backup_login',
        ),
    ]
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
# Stanford-specific Studio URL patterns; each route is scoped to a course
# via settings.COURSE_KEY_PATTERN.  All routes are named so templates and
# views can reverse() them.
urlpatterns = [
    url(
        r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.send_test_enrollment_email,
        name='send_test_enrollment_email',
    ),
    url(
        r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.utility_handler,
        name='utility_handler',
    ),
    url(
        r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.utility_captions_handler,
        name='utility_captions_handler',
    ),
    url(
        r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
        contentstore.views.utility_bulksettings_handler,
        name='utility_bulksettings_handler',
    ),
]
# On Shibboleth-only deployments, also expose fallback signup/signin pages.
if settings.SHIB_ONLY_SITE:
    urlpatterns += [
        url(
            r'^backup_signup$',
            contentstore.views.signup,
            name='backup_signup',
        ),
        url(
            r'^backup_signin$',
            contentstore.views.login_page,
            name='backup_login',
        ),
    ]
|
Add names to stanford view handlers
|
Add names to stanford view handlers
|
Python
|
agpl-3.0
|
Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
+ name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
+ name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
+ name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
Add names to stanford view handlers
|
## Code Before:
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
## Instruction:
Add names to stanford view handlers
## Code After:
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
from django.conf import settings
from django.conf.urls import url
import contentstore.views
urlpatterns = [
url(
r'^settings/send_test_enrollment_email/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.send_test_enrollment_email,
name='send_test_enrollment_email',
),
url(
r'^utilities/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_handler,
+ name='utility_handler',
),
url(
r'^utility/captions/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_captions_handler,
+ name='utility_captions_handler',
),
url(
r'^utility/bulksettings/{}$'.format(settings.COURSE_KEY_PATTERN),
contentstore.views.utility_bulksettings_handler,
+ name='utility_bulksettings_handler',
),
]
if settings.SHIB_ONLY_SITE:
urlpatterns += [
url(
r'^backup_signup$',
contentstore.views.signup,
name='backup_signup',
),
url(
r'^backup_signin$',
contentstore.views.login_page,
name='backup_login',
),
]
|
a88d5414e7762e87c052ca9a28fe36a28b7d4d46
|
oscar/apps/partner/prices.py
|
oscar/apps/partner/prices.py
|
class TaxNotKnown(Exception):
    """
    Raised when a tax-inclusive price is requested but the applicable tax
    is not (yet) known.
    """
class Base(object):
    """
    Base class defining the interface every pricing policy supports.
    """
    #: Whether any prices exist
    exists = False
    #: Whether tax is known for this product (and session)
    is_tax_known = False
    # Normal price properties: price excluding tax, price including tax and
    # the tax amount itself.  Subclasses override these as attributes or
    # properties; None means "not available".
    excl_tax = incl_tax = tax = None
class Unavailable(Base):
    """
    No stockrecord, therefore no prices
    """
    # Inherits exists = False and all-None price attributes from Base.
class FixedPrice(Base):
    """
    Pricing policy for an explicitly supplied price.

    The tax-exclusive amount is mandatory; the tax amount may be supplied
    later (or not at all), in which case the tax-inclusive price cannot be
    computed.
    """
    exists = True

    def __init__(self, excl_tax, tax=None):
        self.excl_tax = excl_tax
        self.tax = tax

    @property
    def is_tax_known(self):
        # Tax is "known" as soon as any amount (even zero) has been given.
        return self.tax is not None

    @property
    def incl_tax(self):
        # Guard clause: refuse to fabricate a tax-inclusive price.
        if not self.is_tax_known:
            raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
        return self.excl_tax + self.tax
class DelegateToStockRecord(Base):
    """
    Pricing policy that reads all price information straight off a
    stockrecord.
    """
    # Stockrecord prices carry tax information (see the ``tax`` property).
    is_tax_known = True

    def __init__(self, stockrecord):
        # stockrecord may be None, in which case no prices exist.
        self.stockrecord = stockrecord

    @property
    def exists(self):
        return self.stockrecord is not None

    @property
    def excl_tax(self):
        return self.stockrecord.price_excl_tax

    @property
    def incl_tax(self):
        return self.stockrecord.price_incl_tax

    @property
    def tax(self):
        return self.stockrecord.price_tax
|
class TaxNotKnown(Exception):
    """
    Raised when a tax-inclusive price is requested but the applicable tax
    is not (yet) known.
    """
class Base(object):
    """
    The interface that any pricing policy must support.
    """
    #: Whether any prices exist
    exists = False
    #: Whether tax is known
    is_tax_known = False
    #: Normal price properties (excluding tax, including tax, tax amount);
    #: None means "not available".
    excl_tax = incl_tax = tax = None
    #: Currency prices are in
    currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
Add currency property to pricing policies
|
Add currency property to pricing policies
|
Python
|
bsd-3-clause
|
pdonadeo/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,bschuon/django-oscar,saadatqadri/django-oscar,kapari/django-oscar,adamend/django-oscar,adamend/django-oscar,bnprk/django-oscar,sonofatailor/django-oscar,Jannes123/django-oscar,faratro/django-oscar,MatthewWilkes/django-oscar,sasha0/django-oscar,anentropic/django-oscar,anentropic/django-oscar,manevant/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,saadatqadri/django-oscar,spartonia/django-oscar,DrOctogon/unwash_ecom,faratro/django-oscar,nickpack/django-oscar,QLGu/django-oscar,ademuk/django-oscar,okfish/django-oscar,jmt4/django-oscar,anentropic/django-oscar,adamend/django-oscar,kapari/django-oscar,thechampanurag/django-oscar,okfish/django-oscar,amirrpp/django-oscar,eddiep1101/django-oscar,jlmadurga/django-oscar,pasqualguerrero/django-oscar,michaelkuty/django-oscar,itbabu/django-oscar,rocopartners/django-oscar,ka7eh/django-oscar,bschuon/django-oscar,eddiep1101/django-oscar,john-parton/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,pasqualguerrero/django-oscar,machtfit/django-oscar,binarydud/django-oscar,jinnykoo/wuyisj,nickpack/django-oscar,Idematica/django-oscar,itbabu/django-oscar,saadatqadri/django-oscar,kapari/django-oscar,MatthewWilkes/django-oscar,john-parton/django-oscar,bnprk/django-oscar,pdonadeo/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,Bogh/django-oscar,monikasulik/django-oscar,Idematica/django-oscar,Jannes123/django-oscar,ademuk/django-oscar,solarissmoke/django-oscar,mexeniz/django-oscar,nfletton/django-oscar,pasqualguerrero/django-oscar,amirrpp/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj,adamend/django-oscar,manevant/django-oscar,ka7eh/django-oscar,jmt4/django-oscar,kapt/django-oscar,lijoantony/django-oscar,solarissmoke/django-oscar,ahmetdaglarbas/e-commerce,jmt4/django-oscar,spartonia/django-oscar,marcoantoniooliveira/labweb,jmt4/django-oscar,rocopartners/django-oscar,pdonadeo/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,jinnyko
o/wuyisj.com,nfletton/django-oscar,josesanch/django-oscar,elliotthill/django-oscar,rocopartners/django-oscar,ahmetdaglarbas/e-commerce,QLGu/django-oscar,amirrpp/django-oscar,okfish/django-oscar,ka7eh/django-oscar,eddiep1101/django-oscar,Idematica/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,solarissmoke/django-oscar,taedori81/django-oscar,jinnykoo/wuyisj,vovanbo/django-oscar,binarydud/django-oscar,django-oscar/django-oscar,nfletton/django-oscar,lijoantony/django-oscar,WadeYuChen/django-oscar,jinnykoo/wuyisj,jlmadurga/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,QLGu/django-oscar,machtfit/django-oscar,michaelkuty/django-oscar,john-parton/django-oscar,elliotthill/django-oscar,john-parton/django-oscar,DrOctogon/unwash_ecom,Jannes123/django-oscar,manevant/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,Bogh/django-oscar,monikasulik/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,WadeYuChen/django-oscar,bnprk/django-oscar,kapt/django-oscar,ademuk/django-oscar,binarydud/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,nickpack/django-oscar,jinnykoo/christmas,itbabu/django-oscar,thechampanurag/django-oscar,jinnykoo/wuyisj.com,vovanbo/django-oscar,okfish/django-oscar,nickpack/django-oscar,makielab/django-oscar,elliotthill/django-oscar,jinnykoo/christmas,manevant/django-oscar,mexeniz/django-oscar,bschuon/django-oscar,dongguangming/django-oscar,django-oscar/django-oscar,taedori81/django-oscar,lijoantony/django-oscar,jlmadurga/django-oscar,thechampanurag/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,binarydud/django-oscar,josesanch/django-oscar,faratro/django-oscar,taedori81/django-oscar,mexeniz/django-oscar,marcoantoniooliveira/labweb,pdonadeo/django-oscar,machtfit/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,WadeYuChen/django-oscar,kapt/django-oscar,nfletton/django-oscar,taedori81/django-oscar,michaelkuty/django-osc
ar,QLGu/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,sasha0/django-oscar,WillisXChen/django-oscar,ahmetdaglarbas/e-commerce,mexeniz/django-oscar,jinnykoo/christmas,marcoantoniooliveira/labweb,makielab/django-oscar,thechampanurag/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,pasqualguerrero/django-oscar,django-oscar/django-oscar,monikasulik/django-oscar,vovanbo/django-oscar,faratro/django-oscar,lijoantony/django-oscar,monikasulik/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,makielab/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,WillisXChen/django-oscar,bschuon/django-oscar,michaelkuty/django-oscar,makielab/django-oscar,WadeYuChen/django-oscar,dongguangming/django-oscar,rocopartners/django-oscar,DrOctogon/unwash_ecom
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
+ """
+ The interface that any pricing policy must support
+ """
+
#: Whether any prices exist
exists = False
- #: Whether tax is known for this product (and session)
+ #: Whether tax is known
is_tax_known = False
- # Normal price properties
+ #: Normal price properties
excl_tax = incl_tax = tax = None
+
+ #: Currency prices are in
+ currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
+ @property
+ def currency(self):
+ return self.stockrecord.price_currency
+
|
Add currency property to pricing policies
|
## Code Before:
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
#: Whether any prices exist
exists = False
#: Whether tax is known for this product (and session)
is_tax_known = False
# Normal price properties
excl_tax = incl_tax = tax = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
## Instruction:
Add currency property to pricing policies
## Code After:
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
"""
The interface that any pricing policy must support
"""
#: Whether any prices exist
exists = False
#: Whether tax is known
is_tax_known = False
#: Normal price properties
excl_tax = incl_tax = tax = None
#: Currency prices are in
currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
@property
def currency(self):
return self.stockrecord.price_currency
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Base(object):
+ """
+ The interface that any pricing policy must support
+ """
+
#: Whether any prices exist
exists = False
- #: Whether tax is known for this product (and session)
+ #: Whether tax is known
is_tax_known = False
- # Normal price properties
+ #: Normal price properties
? +
excl_tax = incl_tax = tax = None
+
+ #: Currency prices are in
+ currency = None
class Unavailable(Base):
"""
No stockrecord, therefore no prices
"""
class FixedPrice(Base):
exists = True
def __init__(self, excl_tax, tax=None):
self.excl_tax = excl_tax
self.tax = tax
@property
def incl_tax(self):
if self.is_tax_known:
return self.excl_tax + self.tax
raise TaxNotKnown("Can't calculate price.incl_tax as tax isn't known")
@property
def is_tax_known(self):
return self.tax is not None
class DelegateToStockRecord(Base):
is_tax_known = True
def __init__(self, stockrecord):
self.stockrecord = stockrecord
@property
def exists(self):
return self.stockrecord is not None
@property
def excl_tax(self):
return self.stockrecord.price_excl_tax
@property
def incl_tax(self):
return self.stockrecord.price_incl_tax
@property
def tax(self):
return self.stockrecord.price_tax
+
+ @property
+ def currency(self):
+ return self.stockrecord.price_currency
|
40d0b5d2d86de6954b93fa90d7a04a84e9e2248b
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
Use g-cloud-10 services mapping for tests
|
Use g-cloud-10 services mapping for tests
|
Python
|
mit
|
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
- "services",
+ "services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
Use g-cloud-10 services mapping for tests
|
## Code Before:
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
## Instruction:
Use g-cloud-10 services mapping for tests
## Code After:
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
"services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
import pytest
import pathlib
import json
import app.mapping
mappings_dir = (pathlib.Path(__file__).parent / "../mappings").resolve()
services_mappings = (
- "services",
+ "services-g-cloud-10",
)
@pytest.fixture(scope="module", params=services_mappings)
def services_mapping_file_name_and_path(request):
return (request.param, mappings_dir / f"{request.param}.json")
@pytest.fixture()
def services_mapping(services_mapping_file_name_and_path):
"""Fixture that provides an Elastic Search mapping, for unit testing functions that expect to be passed one."""
services_mapping_dict = json.loads(services_mapping_file_name_and_path[1].read_text())
return app.mapping.Mapping(services_mapping_dict, "services")
def make_service(**kwargs):
service = {
"id": "id",
"lot": "LoT",
"serviceName": "serviceName",
"serviceDescription": "serviceDescription",
"serviceBenefits": "serviceBenefits",
"serviceFeatures": "serviceFeatures",
"serviceCategories": ["serviceCategories"],
"supplierName": "Supplier Name",
"publicSectorNetworksTypes": ["PSN", "PNN"],
}
service.update(kwargs)
return {
"document": service
}
@pytest.fixture()
def service():
"""
A fixture for a service such as might be indexed in the Search API.
:return: dict
"""
return make_service()
|
e457f09f280bc86bc7b5cdcfb4fa3ebf093402ec
|
services/dropbox.py
|
services/dropbox.py
|
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
|
import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
Upgrade Dropbox to OAuth 2
|
Upgrade Dropbox to OAuth 2
|
Python
|
bsd-3-clause
|
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
|
import foauth.providers
- from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
- class Dropbox(foauth.providers.OAuth1):
+ class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
- request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
- authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
+ authorize_url = 'https://www.dropbox.com/oauth2/authorize'
- access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
+ access_token_url = 'https://api.dropboxapi.com/oauth2/token'
- api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
+ api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
-
- signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
- r = self.api(key, self.api_domains[0], u'/1/account/info')
+ r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
- return unicode(r.json()[u'uid'])
+ return unicode(r.json()[u'account_id'])
|
Upgrade Dropbox to OAuth 2
|
## Code Before:
import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
class Dropbox(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/1/account/info')
return unicode(r.json()[u'uid'])
## Instruction:
Upgrade Dropbox to OAuth 2
## Code After:
import foauth.providers
class Dropbox(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
authorize_url = 'https://www.dropbox.com/oauth2/authorize'
access_token_url = 'https://api.dropboxapi.com/oauth2/token'
api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
return unicode(r.json()[u'account_id'])
|
import foauth.providers
- from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT
- class Dropbox(foauth.providers.OAuth1):
? ^
+ class Dropbox(foauth.providers.OAuth2):
? ^
# General info about the provider
provider_url = 'https://www.dropbox.com/'
docs_url = 'https://www.dropbox.com/developers/reference/api'
favicon_url = 'https://cf.dropboxstatic.com/static/images/favicon-vflk5FiAC.ico'
category = 'Files'
# URLs to interact with the API
- request_token_url = 'https://api.dropbox.com/1/oauth/request_token'
- authorize_url = 'https://www.dropbox.com/1/oauth/authorize'
? --
+ authorize_url = 'https://www.dropbox.com/oauth2/authorize'
? +
- access_token_url = 'https://api.dropbox.com/1/oauth/access_token'
? -- -------
+ access_token_url = 'https://api.dropboxapi.com/oauth2/token'
? +++ +
- api_domains = ['api.dropbox.com', 'api-content.dropbox.com']
? ----
+ api_domains = ['api.dropboxapi.com', 'content.dropboxapi.com', 'notify.dropboxapi.com']
? +++ ++++++++++++++++++++++++++++
-
- signature_method = SIGNATURE_PLAINTEXT
available_permissions = [
(None, 'read and write to your entire Dropbox'),
]
def get_user_id(self, key):
- r = self.api(key, self.api_domains[0], u'/1/account/info')
? ^ ^^^^
+ r = self.api(key, self.api_domains[0], u'/2/users/get_current_account', method='POST')
? ^ ++++++++++++++++++ ^^^^^^^ +++++++
- return unicode(r.json()[u'uid'])
+ return unicode(r.json()[u'account_id'])
? ++++ +++
|
18d2c4be27b58a142145f0726e6be21c358064cd
|
src/rnaseq_lib/docker/__init__.py
|
src/rnaseq_lib/docker/__init__.py
|
import os
from subprocess import call
def base_docker_call(mount):
return ['docker', 'run', '--rm', '-v', '{}:/data'.format(mount)]
def fix_directory_ownership(output_dir, tool):
"""
Uses a Docker container to change ownership recursively of a directory
:param str output_dir: Directory to change ownership of
:param str tool: Docker tool to use
"""
stat = os.stat(output_dir)
call(['docker', 'run', '--rm', '--entrypoint=chown', '-v', '{}:/data'.format(output_dir),
tool, '-R', '{}:{}'.format(stat.st_uid, stat.st_gid), '/data'])
|
import os
from subprocess import call
def base_docker_call(mount):
"""
Returns the boilerplate array used for Docker calls
:param str mount: Directory to mount
:return: Docker run parameters
:rtype: list(str)
"""
return ['docker', 'run', '--rm', '-v', '{}:/data'.format(os.path.abspath(mount))]
def fix_directory_ownership(output_dir, tool):
"""
Uses a Docker container to change ownership recursively of a directory
:param str output_dir: Directory to change ownership of
:param str tool: Docker tool to use
"""
stat = os.stat(output_dir)
call(['docker', 'run', '--rm', '--entrypoint=chown', '-v', '{}:/data'.format(output_dir),
tool, '-R', '{}:{}'.format(stat.st_uid, stat.st_gid), '/data'])
|
Use abspath for docker mount
|
Use abspath for docker mount
|
Python
|
mit
|
jvivian/rnaseq-lib,jvivian/rnaseq-lib
|
import os
from subprocess import call
def base_docker_call(mount):
+ """
+ Returns the boilerplate array used for Docker calls
+
+ :param str mount: Directory to mount
+ :return: Docker run parameters
+ :rtype: list(str)
+ """
- return ['docker', 'run', '--rm', '-v', '{}:/data'.format(mount)]
+ return ['docker', 'run', '--rm', '-v', '{}:/data'.format(os.path.abspath(mount))]
def fix_directory_ownership(output_dir, tool):
"""
Uses a Docker container to change ownership recursively of a directory
:param str output_dir: Directory to change ownership of
:param str tool: Docker tool to use
"""
stat = os.stat(output_dir)
call(['docker', 'run', '--rm', '--entrypoint=chown', '-v', '{}:/data'.format(output_dir),
tool, '-R', '{}:{}'.format(stat.st_uid, stat.st_gid), '/data'])
|
Use abspath for docker mount
|
## Code Before:
import os
from subprocess import call
def base_docker_call(mount):
return ['docker', 'run', '--rm', '-v', '{}:/data'.format(mount)]
def fix_directory_ownership(output_dir, tool):
"""
Uses a Docker container to change ownership recursively of a directory
:param str output_dir: Directory to change ownership of
:param str tool: Docker tool to use
"""
stat = os.stat(output_dir)
call(['docker', 'run', '--rm', '--entrypoint=chown', '-v', '{}:/data'.format(output_dir),
tool, '-R', '{}:{}'.format(stat.st_uid, stat.st_gid), '/data'])
## Instruction:
Use abspath for docker mount
## Code After:
import os
from subprocess import call
def base_docker_call(mount):
"""
Returns the boilerplate array used for Docker calls
:param str mount: Directory to mount
:return: Docker run parameters
:rtype: list(str)
"""
return ['docker', 'run', '--rm', '-v', '{}:/data'.format(os.path.abspath(mount))]
def fix_directory_ownership(output_dir, tool):
"""
Uses a Docker container to change ownership recursively of a directory
:param str output_dir: Directory to change ownership of
:param str tool: Docker tool to use
"""
stat = os.stat(output_dir)
call(['docker', 'run', '--rm', '--entrypoint=chown', '-v', '{}:/data'.format(output_dir),
tool, '-R', '{}:{}'.format(stat.st_uid, stat.st_gid), '/data'])
|
import os
from subprocess import call
def base_docker_call(mount):
+ """
+ Returns the boilerplate array used for Docker calls
+
+ :param str mount: Directory to mount
+ :return: Docker run parameters
+ :rtype: list(str)
+ """
- return ['docker', 'run', '--rm', '-v', '{}:/data'.format(mount)]
+ return ['docker', 'run', '--rm', '-v', '{}:/data'.format(os.path.abspath(mount))]
? ++++++++++++++++ +
def fix_directory_ownership(output_dir, tool):
"""
Uses a Docker container to change ownership recursively of a directory
:param str output_dir: Directory to change ownership of
:param str tool: Docker tool to use
"""
stat = os.stat(output_dir)
call(['docker', 'run', '--rm', '--entrypoint=chown', '-v', '{}:/data'.format(output_dir),
tool, '-R', '{}:{}'.format(stat.st_uid, stat.st_gid), '/data'])
|
65b1f849cbf02320992e3ef9db86c71e564cc826
|
src/mountebank/exceptions.py
|
src/mountebank/exceptions.py
|
class ImposterException(StandardError):
def __init__(self, response):
self._response = response
|
import sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
Make Python 2 and 3 compatible
|
Make Python 2 and 3 compatible
|
Python
|
bsd-2-clause
|
kevinjqiu/py-mountebank
|
- class ImposterException(StandardError):
+ import sys
+
+
+ if sys.version_info.major == 2:
+ error_base_class = StandardError
+ elif sys.version_info.major == 3:
+ error_base_class = Exception
+ else:
+ raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
+
+
+ class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
Make Python 2 and 3 compatible
|
## Code Before:
class ImposterException(StandardError):
def __init__(self, response):
self._response = response
## Instruction:
Make Python 2 and 3 compatible
## Code After:
import sys
if sys.version_info.major == 2:
error_base_class = StandardError
elif sys.version_info.major == 3:
error_base_class = Exception
else:
raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
- class ImposterException(StandardError):
+ import sys
+
+
+ if sys.version_info.major == 2:
+ error_base_class = StandardError
+ elif sys.version_info.major == 3:
+ error_base_class = Exception
+ else:
+ raise RuntimeError('Unsupported Python version: {}'.format(sys.version))
+
+
+ class ImposterException(error_base_class):
def __init__(self, response):
self._response = response
|
3e202c0dd4fa4c99ebee758a13ee5f6e205ef336
|
tests/functional/test_front_page.py
|
tests/functional/test_front_page.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
from webpages import *
@pytest.fixture
def page(browser, server_url, access_token):
return FrontPage(browser, server_url, access_token)
class TestFrontPage(object):
def test_should_find_page_div(self, page):
page.open()
div = page.find_element_by_id('front')
assert div is not None
header = page.find_element_by_tag_name('h1')
# time.sleep(5)
# print(header.text)
assert 'EAvatar ME' in header.text
def test_can_login_and_logout(self, page):
page.open()
page.assert_front_page()
token_input = page.find_element_by_xpath("//input[@name='token']")
token_input.send_keys(page.access_token)
login_btn = page.find_element_by_id('loginBtn')
# page.sleep(3)
login_btn.click()
header2 = page.find_element_by_tag_name('h1')
assert header2 is not None
# print("Header:", header2.text)
assert 'EAvatar' in header2.text
page.logout()
# page.sleep(120)
# page.assert_front_page()
|
from __future__ import absolute_import, division, print_function, unicode_literals
from webpages import *
@pytest.fixture
def page(browser, server_url, access_token):
return FrontPage(browser, server_url, access_token)
class TestFrontPage(object):
def test_should_find_page_div(self, page):
page.open()
div = page.find_element_by_id('front')
assert div is not None
header = page.find_element_by_tag_name('h1')
# time.sleep(5)
# print(header.text)
assert 'EAvatar ME' in header.text
def test_can_login(self, page):
page.open()
page.assert_front_page()
token_input = page.find_element_by_xpath("//input[@name='token']")
token_input.send_keys(page.access_token)
login_btn = page.find_element_by_id('loginBtn')
# page.sleep(3)
login_btn.click()
header2 = page.find_element_by_tag_name('h1')
assert header2 is not None
# print("Header:", header2.text)
assert 'EAvatar' in header2.text
|
Change failed test case for front page to make build pass
|
Change failed test case for front page to make build pass
|
Python
|
apache-2.0
|
eavatar/eavatar-me,eavatar/eavatar-me,eavatar/eavatar-me,eavatar/eavatar-me
|
from __future__ import absolute_import, division, print_function, unicode_literals
from webpages import *
@pytest.fixture
def page(browser, server_url, access_token):
return FrontPage(browser, server_url, access_token)
class TestFrontPage(object):
def test_should_find_page_div(self, page):
page.open()
div = page.find_element_by_id('front')
assert div is not None
header = page.find_element_by_tag_name('h1')
# time.sleep(5)
# print(header.text)
assert 'EAvatar ME' in header.text
- def test_can_login_and_logout(self, page):
+ def test_can_login(self, page):
page.open()
page.assert_front_page()
token_input = page.find_element_by_xpath("//input[@name='token']")
token_input.send_keys(page.access_token)
login_btn = page.find_element_by_id('loginBtn')
# page.sleep(3)
login_btn.click()
header2 = page.find_element_by_tag_name('h1')
assert header2 is not None
# print("Header:", header2.text)
assert 'EAvatar' in header2.text
- page.logout()
- # page.sleep(120)
- # page.assert_front_page()
|
Change failed test case for front page to make build pass
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
from webpages import *
@pytest.fixture
def page(browser, server_url, access_token):
return FrontPage(browser, server_url, access_token)
class TestFrontPage(object):
def test_should_find_page_div(self, page):
page.open()
div = page.find_element_by_id('front')
assert div is not None
header = page.find_element_by_tag_name('h1')
# time.sleep(5)
# print(header.text)
assert 'EAvatar ME' in header.text
def test_can_login_and_logout(self, page):
page.open()
page.assert_front_page()
token_input = page.find_element_by_xpath("//input[@name='token']")
token_input.send_keys(page.access_token)
login_btn = page.find_element_by_id('loginBtn')
# page.sleep(3)
login_btn.click()
header2 = page.find_element_by_tag_name('h1')
assert header2 is not None
# print("Header:", header2.text)
assert 'EAvatar' in header2.text
page.logout()
# page.sleep(120)
# page.assert_front_page()
## Instruction:
Change failed test case for front page to make build pass
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
from webpages import *
@pytest.fixture
def page(browser, server_url, access_token):
return FrontPage(browser, server_url, access_token)
class TestFrontPage(object):
def test_should_find_page_div(self, page):
page.open()
div = page.find_element_by_id('front')
assert div is not None
header = page.find_element_by_tag_name('h1')
# time.sleep(5)
# print(header.text)
assert 'EAvatar ME' in header.text
def test_can_login(self, page):
page.open()
page.assert_front_page()
token_input = page.find_element_by_xpath("//input[@name='token']")
token_input.send_keys(page.access_token)
login_btn = page.find_element_by_id('loginBtn')
# page.sleep(3)
login_btn.click()
header2 = page.find_element_by_tag_name('h1')
assert header2 is not None
# print("Header:", header2.text)
assert 'EAvatar' in header2.text
|
from __future__ import absolute_import, division, print_function, unicode_literals
from webpages import *
@pytest.fixture
def page(browser, server_url, access_token):
return FrontPage(browser, server_url, access_token)
class TestFrontPage(object):
def test_should_find_page_div(self, page):
page.open()
div = page.find_element_by_id('front')
assert div is not None
header = page.find_element_by_tag_name('h1')
# time.sleep(5)
# print(header.text)
assert 'EAvatar ME' in header.text
- def test_can_login_and_logout(self, page):
? -----------
+ def test_can_login(self, page):
page.open()
page.assert_front_page()
token_input = page.find_element_by_xpath("//input[@name='token']")
token_input.send_keys(page.access_token)
login_btn = page.find_element_by_id('loginBtn')
# page.sleep(3)
login_btn.click()
header2 = page.find_element_by_tag_name('h1')
assert header2 is not None
# print("Header:", header2.text)
assert 'EAvatar' in header2.text
- page.logout()
- # page.sleep(120)
- # page.assert_front_page()
|
83c68749910933cdb3a8be1a4fc2c50709f671a1
|
admin/common_auth/forms.py
|
admin/common_auth/forms.py
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
Add checkboxselectmultiple widget for admin form
|
Add checkboxselectmultiple widget for admin form
|
Python
|
apache-2.0
|
felliott/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,erinspace/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,icereval/osf.io,chennan47/osf.io,mfraezz/osf.io,adlius/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,icereval/osf.io,cslzchen/osf.io,saradbowman/osf.io,binoculars/osf.io,aaxelb/osf.io,cwisecarver/osf.io,felliott/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,chrisseto/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,chrisseto/osf.io,pattisdr/osf.io,mfraezz/osf.io,TomBaxter/osf.io,baylee-d/osf.io,chrisseto/osf.io,cslzchen/osf.io,cwisecarver/osf.io,aaxelb/osf.io,crcresearch/osf.io,cwisecarver/osf.io,caneruguz/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,caneruguz/osf.io,chrisseto/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,icereval/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,hmoco/osf.io,crcresearch/osf.io,leb2dg/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,mattclark/osf.io,hmoco/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,felliott/osf.io,caneruguz/osf.io,leb2dg/osf.io,mfraezz/osf.io,sloria/osf.io,binoculars/osf.io,erinspace/osf.io,felliott/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,binoculars/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,adlius/osf.io,hmoco/osf.io,erinspace/osf.io,caseyrollins/osf.io,mattclark/osf.io,chennan47/osf.io,mfraezz/osf.io,chennan47/osf.io,sloria/osf.io
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
- required=False
+ required=False,
+ widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
Add checkboxselectmultiple widget for admin form
|
## Code Before:
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
## Instruction:
Add checkboxselectmultiple widget for admin form
## Code After:
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
- required=False
+ required=False,
? +
+ widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
186eaeb5b5e8d7c7f06305566729bf36424c6e77
|
grab/__init__.py
|
grab/__init__.py
|
from base import (GrabError, DataNotFound, GrabNetworkError,
GrabMisuseError, UploadContent, UploadFile,
GrabTimeoutError)
from transport.curl import GrabCurl
#from transport.urllib import GrabUrllib
from transport.selenium import GrabSelenium
from transport.requests import GrabRequests
Grab = GrabCurl
version_info = (0, 3, 21)
__version__ = '.'.join(map(str, version_info))
|
from base import (GrabError, DataNotFound, GrabNetworkError,
GrabMisuseError, UploadContent, UploadFile,
GrabTimeoutError)
from transport.curl import GrabCurl
#from transport.urllib import GrabUrllib
from transport.selenium import GrabSelenium
from transport.requests import GrabRequests
from tools.logs import default_logging
Grab = GrabCurl
version_info = (0, 3, 21)
__version__ = '.'.join(map(str, version_info))
|
Add default_logging function to package namespace
|
Add default_logging function to package namespace
|
Python
|
mit
|
DDShadoww/grab,maurobaraldi/grab,DDShadoww/grab,liorvh/grab,SpaceAppsXploration/grab,huiyi1990/grab,subeax/grab,lorien/grab,pombredanne/grab-1,giserh/grab,alihalabyah/grab,kevinlondon/grab,shaunstanislaus/grab,subeax/grab,maurobaraldi/grab,kevinlondon/grab,SpaceAppsXploration/grab,shaunstanislaus/grab,pombredanne/grab-1,codevlabs/grab,istinspring/grab,istinspring/grab,huiyi1990/grab,codevlabs/grab,raybuhr/grab,giserh/grab,liorvh/grab,raybuhr/grab,lorien/grab,subeax/grab,alihalabyah/grab
|
from base import (GrabError, DataNotFound, GrabNetworkError,
GrabMisuseError, UploadContent, UploadFile,
GrabTimeoutError)
from transport.curl import GrabCurl
#from transport.urllib import GrabUrllib
from transport.selenium import GrabSelenium
from transport.requests import GrabRequests
+ from tools.logs import default_logging
Grab = GrabCurl
version_info = (0, 3, 21)
__version__ = '.'.join(map(str, version_info))
|
Add default_logging function to package namespace
|
## Code Before:
from base import (GrabError, DataNotFound, GrabNetworkError,
GrabMisuseError, UploadContent, UploadFile,
GrabTimeoutError)
from transport.curl import GrabCurl
#from transport.urllib import GrabUrllib
from transport.selenium import GrabSelenium
from transport.requests import GrabRequests
Grab = GrabCurl
version_info = (0, 3, 21)
__version__ = '.'.join(map(str, version_info))
## Instruction:
Add default_logging function to package namespace
## Code After:
from base import (GrabError, DataNotFound, GrabNetworkError,
GrabMisuseError, UploadContent, UploadFile,
GrabTimeoutError)
from transport.curl import GrabCurl
#from transport.urllib import GrabUrllib
from transport.selenium import GrabSelenium
from transport.requests import GrabRequests
from tools.logs import default_logging
Grab = GrabCurl
version_info = (0, 3, 21)
__version__ = '.'.join(map(str, version_info))
|
from base import (GrabError, DataNotFound, GrabNetworkError,
GrabMisuseError, UploadContent, UploadFile,
GrabTimeoutError)
from transport.curl import GrabCurl
#from transport.urllib import GrabUrllib
from transport.selenium import GrabSelenium
from transport.requests import GrabRequests
+ from tools.logs import default_logging
Grab = GrabCurl
version_info = (0, 3, 21)
__version__ = '.'.join(map(str, version_info))
|
9ee00a148763c7caac1ae0d7dcb3efa496121ee7
|
lamana/__init__.py
|
lamana/__init__.py
|
__title__ = 'lamana'
__version__ = '0.4.11-dev'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
|
__title__ = 'lamana'
__version__ = '0.4.11.dev0' # PEP 440 style
##__version__ = '0.4.11-dev'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
|
Modify dev versioning; see PEP 440
|
Modify dev versioning; see PEP 440
|
Python
|
bsd-3-clause
|
par2/lamana
|
__title__ = 'lamana'
+ __version__ = '0.4.11.dev0' # PEP 440 style
- __version__ = '0.4.11-dev'
+ ##__version__ = '0.4.11-dev'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
|
Modify dev versioning; see PEP 440
|
## Code Before:
__title__ = 'lamana'
__version__ = '0.4.11-dev'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
## Instruction:
Modify dev versioning; see PEP 440
## Code After:
__title__ = 'lamana'
__version__ = '0.4.11.dev0' # PEP 440 style
##__version__ = '0.4.11-dev'
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
|
__title__ = 'lamana'
+ __version__ = '0.4.11.dev0' # PEP 440 style
- __version__ = '0.4.11-dev'
+ ##__version__ = '0.4.11-dev'
? ++
__author__ = 'P. Robinson II'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015, P. Robinson II'
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
|
b5bf31eab3fef21872ce44ada1a14aee9c3216d7
|
mlab-ns-simulator/mlabsim/tests/test_update.py
|
mlab-ns-simulator/mlabsim/tests/test_update.py
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
Update test_render_PUT_valid_parameters to be an approximate first draft.
|
Update test_render_PUT_valid_parameters to be an approximate first draft.
|
Python
|
apache-2.0
|
hellais/ooni-support,m-lab/ooni-support,m-lab/ooni-support,hellais/ooni-support
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
+ fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
+
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
+ # Mocks / components:
+ db = {}
+
# Mocks:
- m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
+ 'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
- ur = update.UpdateResource(m_db)
+ ur = update.UpdateResource(db)
ur.render_PUT(m_request)
- # Verify that m_db now stores tool_extra:
+ # Verify that m_db now stores fqdn: tool_extra:
- raise NotImplementedError('verification of m_db storage for tool_extra')
+ self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
Update test_render_PUT_valid_parameters to be an approximate first draft.
|
## Code Before:
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks:
m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(m_db)
ur.render_PUT(m_request)
# Verify that m_db now stores tool_extra:
raise NotImplementedError('verification of m_db storage for tool_extra')
## Instruction:
Update test_render_PUT_valid_parameters to be an approximate first draft.
## Code After:
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
# Mocks / components:
db = {}
# Mocks:
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
ur = update.UpdateResource(db)
ur.render_PUT(m_request)
# Verify that m_db now stores fqdn: tool_extra:
self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
import json
import urllib
from twisted.trial import unittest
import mock
from mlabsim import update
class UpdateResourceTests (unittest.TestCase):
def test_render_PUT_valid_parameters(self):
# Test data:
+ fqdn = 'mlab01.ooni-tests.not-real.except-it-actually-could-be.example.com'
+
tool_extra = {
'collector_onion': 'testfakenotreal.onion',
}
tool_extra_param = urllib.quote(json.dumps(tool_extra))
+ # Mocks / components:
+ db = {}
+
# Mocks:
- m_db = mock.MagicMock()
m_request = mock.MagicMock()
# Fake a request with sufficient parameters:
m_request.params = {
+ 'fqdn': fqdn,
'tool_extra': tool_extra_param,
}
# Execute the code under test:
- ur = update.UpdateResource(m_db)
? --
+ ur = update.UpdateResource(db)
ur.render_PUT(m_request)
- # Verify that m_db now stores tool_extra:
+ # Verify that m_db now stores fqdn: tool_extra:
? ++++++
- raise NotImplementedError('verification of m_db storage for tool_extra')
+ self.assertEqual({fqdn: {"tool_extra": tool_extra}}, db)
|
840dce03718947498e72e561e7ddca22c4174915
|
django_olcc/olcc/context_processors.py
|
django_olcc/olcc/context_processors.py
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
record = ImportRecord.objects.latest()
if record:
return {
'last_updated': record.created_at
}
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
try:
return {
'last_updated': ImportRecord.objects.latest().created_at
}
except ImportRecord.DoesNotExist:
pass
|
Fix a DoesNotExist bug in the olcc context processor.
|
Fix a DoesNotExist bug in the olcc context processor.
|
Python
|
mit
|
twaddington/django-olcc,twaddington/django-olcc,twaddington/django-olcc
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
+ try:
- record = ImportRecord.objects.latest()
- if record:
return {
- 'last_updated': record.created_at
+ 'last_updated': ImportRecord.objects.latest().created_at
}
+ except ImportRecord.DoesNotExist:
+ pass
|
Fix a DoesNotExist bug in the olcc context processor.
|
## Code Before:
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
record = ImportRecord.objects.latest()
if record:
return {
'last_updated': record.created_at
}
## Instruction:
Fix a DoesNotExist bug in the olcc context processor.
## Code After:
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
try:
return {
'last_updated': ImportRecord.objects.latest().created_at
}
except ImportRecord.DoesNotExist:
pass
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
+ try:
- record = ImportRecord.objects.latest()
- if record:
return {
- 'last_updated': record.created_at
+ 'last_updated': ImportRecord.objects.latest().created_at
? ++++ ++ +++++++++++++++++
}
+ except ImportRecord.DoesNotExist:
+ pass
|
8815507f8e334238d269468e08c22f4415e58528
|
spacy/lang/es/__init__.py
|
spacy/lang/es/__init__.py
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class Spanish(Language):
lang = 'es'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
__all__ = ['Spanish']
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class SpanishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
class Spanish(Language):
lang = 'es'
Defaults = SpanishDefaults
__all__ = ['Spanish']
|
Move SpanishDefaults out of Language class, for pickle
|
Move SpanishDefaults out of Language class, for pickle
|
Python
|
mit
|
recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
+ class SpanishDefaults(Language.Defaults):
+ lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+ lex_attr_getters[LANG] = lambda text: 'es'
+
+ tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
+ tag_map = dict(TAG_MAP)
+ stop_words = set(STOP_WORDS)
+
+ @classmethod
+ def create_lemmatizer(cls, nlp=None):
+ return Lemmatizer(LOOKUP)
+
+
class Spanish(Language):
lang = 'es'
+ Defaults = SpanishDefaults
- class Defaults(Language.Defaults):
- lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
- lex_attr_getters[LANG] = lambda text: 'es'
-
- tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
- tag_map = dict(TAG_MAP)
- stop_words = set(STOP_WORDS)
-
- @classmethod
- def create_lemmatizer(cls, nlp=None):
- return Lemmatizer(LOOKUP)
-
__all__ = ['Spanish']
|
Move SpanishDefaults out of Language class, for pickle
|
## Code Before:
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class Spanish(Language):
lang = 'es'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
__all__ = ['Spanish']
## Instruction:
Move SpanishDefaults out of Language class, for pickle
## Code After:
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class SpanishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
class Spanish(Language):
lang = 'es'
Defaults = SpanishDefaults
__all__ = ['Spanish']
|
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
+ class SpanishDefaults(Language.Defaults):
+ lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
+ lex_attr_getters[LANG] = lambda text: 'es'
+
+ tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
+ tag_map = dict(TAG_MAP)
+ stop_words = set(STOP_WORDS)
+
+ @classmethod
+ def create_lemmatizer(cls, nlp=None):
+ return Lemmatizer(LOOKUP)
+
+
class Spanish(Language):
lang = 'es'
+ Defaults = SpanishDefaults
- class Defaults(Language.Defaults):
- lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
- lex_attr_getters[LANG] = lambda text: 'es'
-
- tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
- tag_map = dict(TAG_MAP)
- stop_words = set(STOP_WORDS)
-
- @classmethod
- def create_lemmatizer(cls, nlp=None):
- return Lemmatizer(LOOKUP)
-
__all__ = ['Spanish']
|
fa19a6ec882727bb96f27993d7ac765797c19556
|
logger/utilities.py
|
logger/utilities.py
|
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
|
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
Add a find_name utility function
|
Add a find_name utility function
|
Python
|
bsd-2-clause
|
Vgr255/logging
|
"""Small utility functions for use in various places."""
- __all__ = ["pick", "is_dunder"]
+ __all__ = ["pick", "is_dunder", "find_name"]
+
+ import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
+ def find_name(name, depth=0):
+ """Find a name in the calling frame's scopes."""
+ calling_frame = sys._getframe(depth + 2)
+ if name in calling_frame.f_locals:
+ return calling_frame.f_locals[name]
+ if name in calling_frame.f_globals:
+ return calling_frame.f_globals[name]
+ if name in calling_frame.f_builtins:
+ return calling_frame.f_builtins[name]
+
+ raise NameError("could not find {!r}".format(name))
+
|
Add a find_name utility function
|
## Code Before:
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder"]
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
## Instruction:
Add a find_name utility function
## Code After:
"""Small utility functions for use in various places."""
__all__ = ["pick", "is_dunder", "find_name"]
import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
def find_name(name, depth=0):
"""Find a name in the calling frame's scopes."""
calling_frame = sys._getframe(depth + 2)
if name in calling_frame.f_locals:
return calling_frame.f_locals[name]
if name in calling_frame.f_globals:
return calling_frame.f_globals[name]
if name in calling_frame.f_builtins:
return calling_frame.f_builtins[name]
raise NameError("could not find {!r}".format(name))
|
"""Small utility functions for use in various places."""
- __all__ = ["pick", "is_dunder"]
+ __all__ = ["pick", "is_dunder", "find_name"]
? +++++++++++++
+
+ import sys
def pick(arg, default):
"""Handler for default versus given argument."""
return default if arg is None else arg
def is_dunder(name):
"""Return True if a __dunder__ name, False otherwise."""
return name[:2] == name[-2:] == "__" and "_" not in (name[2:3], name[-3:-2])
+
+ def find_name(name, depth=0):
+ """Find a name in the calling frame's scopes."""
+ calling_frame = sys._getframe(depth + 2)
+ if name in calling_frame.f_locals:
+ return calling_frame.f_locals[name]
+ if name in calling_frame.f_globals:
+ return calling_frame.f_globals[name]
+ if name in calling_frame.f_builtins:
+ return calling_frame.f_builtins[name]
+
+ raise NameError("could not find {!r}".format(name))
|
fefde8aef88cbfb13cb1f0bfcd3ac476ad7a903c
|
spacy/download.py
|
spacy/download.py
|
from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
print("Model successfully installed.", file=sys.stderr)
|
from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
from . import util
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
data_path = util.get_data_path()
print("Model successfully installed to %s" % data_path, file=sys.stderr)
|
Make installation print data path.
|
Make installation print data path.
|
Python
|
mit
|
explosion/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,spacy-io/spaCy,spacy-io/spaCy,raphael0202/spaCy,banglakit/spaCy,explosion/spaCy,honnibal/spaCy,recognai/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,banglakit/spaCy,raphael0202/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,raphael0202/spaCy,banglakit/spaCy,explosion/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,banglakit/spaCy,spacy-io/spaCy,honnibal/spaCy,raphael0202/spaCy
|
from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
+ from . import util
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
+ data_path = util.get_data_path()
- print("Model successfully installed.", file=sys.stderr)
+ print("Model successfully installed to %s" % data_path, file=sys.stderr)
|
Make installation print data path.
|
## Code Before:
from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
print("Model successfully installed.", file=sys.stderr)
## Instruction:
Make installation print data path.
## Code After:
from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
from . import util
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
data_path = util.get_data_path()
print("Model successfully installed to %s" % data_path, file=sys.stderr)
|
from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
+ from . import util
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
+ data_path = util.get_data_path()
- print("Model successfully installed.", file=sys.stderr)
? ^
+ print("Model successfully installed to %s" % data_path, file=sys.stderr)
? ^^^^^^ ++++++++++++
|
d0380db930dbf145108a7ef0330dd19475f7fdee
|
test_arrange_schedule.py
|
test_arrange_schedule.py
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
Add test case for read_arrange_mode()
|
Add test case for read_arrange_mode()
|
Python
|
apache-2.0
|
Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
+
+ def test_read_arrange_mode():
+ keys = ['arrange_sn','arrange_mode','condition']
+ receive_msg = read_arrange_mode()
+ for key in keys:
+ assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
+ test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
Add test case for read_arrange_mode()
|
## Code Before:
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
## Instruction:
Add test case for read_arrange_mode()
## Code After:
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
+
+ def test_read_arrange_mode():
+ keys = ['arrange_sn','arrange_mode','condition']
+ receive_msg = read_arrange_mode()
+ for key in keys:
+ assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
+ test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
776c1dbda3871c2b94d849ea59db25f93bb59525
|
src/mmw/apps/water_balance/views.py
|
src/mmw/apps/water_balance/views.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.shortcuts import render_to_response
def home_page(request):
return render_to_response('home_page/index.html')
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.shortcuts import render_to_response
from django.template import RequestContext
def home_page(request):
return render_to_response('home_page/index.html', RequestContext(request))
|
Add RequestContext to Micro site
|
Add RequestContext to Micro site
This allows us to populate settings variables such as Google Analytics
codes. See original work done for #769.
Refs #920
|
Python
|
apache-2.0
|
lliss/model-my-watershed,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,lliss/model-my-watershed,project-icp/bee-pollinator-app
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.shortcuts import render_to_response
+ from django.template import RequestContext
def home_page(request):
- return render_to_response('home_page/index.html')
+ return render_to_response('home_page/index.html', RequestContext(request))
|
Add RequestContext to Micro site
|
## Code Before:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.shortcuts import render_to_response
def home_page(request):
return render_to_response('home_page/index.html')
## Instruction:
Add RequestContext to Micro site
## Code After:
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.shortcuts import render_to_response
from django.template import RequestContext
def home_page(request):
return render_to_response('home_page/index.html', RequestContext(request))
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.shortcuts import render_to_response
+ from django.template import RequestContext
def home_page(request):
- return render_to_response('home_page/index.html')
+ return render_to_response('home_page/index.html', RequestContext(request))
? ++++++++++++++++++++++++ +
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.