commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 52
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| fuzzy_diff
stringlengths 16
3.18k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e99f7b6d25464f36accc2f04899edfa9e982bee2
|
tests/cpydiff/core_fstring_concat.py
|
tests/cpydiff/core_fstring_concat.py
|
x = 1
print("aa" f"{x}")
print(f"{x}" "ab")
print("a{}a" f"{x}")
print(f"{x}" "a{}b")
|
x, y = 1, 2
print("aa" f"{x}") # works
print(f"{x}" "ab") # works
print("a{}a" f"{x}") # fails
print(f"{x}" "a{}b") # fails
print(f"{x}" f"{y}") # fails
|
Clarify f-string diffs regarding concatenation.
|
tests/cpydiff: Clarify f-string diffs regarding concatenation.
Concatenation of any literals (including f-strings) should be avoided.
Signed-off-by: Jim Mussared <[email protected]>
|
Python
|
mit
|
adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython
|
- x = 1
+ x, y = 1, 2
- print("aa" f"{x}")
+ print("aa" f"{x}") # works
- print(f"{x}" "ab")
+ print(f"{x}" "ab") # works
- print("a{}a" f"{x}")
+ print("a{}a" f"{x}") # fails
- print(f"{x}" "a{}b")
+ print(f"{x}" "a{}b") # fails
+ print(f"{x}" f"{y}") # fails
|
Clarify f-string diffs regarding concatenation.
|
## Code Before:
x = 1
print("aa" f"{x}")
print(f"{x}" "ab")
print("a{}a" f"{x}")
print(f"{x}" "a{}b")
## Instruction:
Clarify f-string diffs regarding concatenation.
## Code After:
x, y = 1, 2
print("aa" f"{x}") # works
print(f"{x}" "ab") # works
print("a{}a" f"{x}") # fails
print(f"{x}" "a{}b") # fails
print(f"{x}" f"{y}") # fails
|
...
x, y = 1, 2
print("aa" f"{x}") # works
print(f"{x}" "ab") # works
print("a{}a" f"{x}") # fails
print(f"{x}" "a{}b") # fails
print(f"{x}" f"{y}") # fails
...
|
418e7a7d8c8261578df046d251041ab0794d1580
|
decorators.py
|
decorators.py
|
class RequiresType(object):
"""
Checks that the first (or position given by the keyword argument 'position'
argument to the function is an instance of one of the types given in the
positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
|
class RequiresType(object):
"""
Checks that the first (or position given by the keyword argument 'position'
argument to the function is an instance of one of the types given in the
positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
raise TypeError("Invalid argument type '%s' at position %d. " +
"Expected one of (%s)" % (
type(args[self.position]).__name__, self.position,
", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
|
Raise TypeError instead of returning
|
Raise TypeError instead of returning
|
Python
|
bsd-3-clause
|
rasher/reddit-modbot
|
class RequiresType(object):
"""
Checks that the first (or position given by the keyword argument 'position'
argument to the function is an instance of one of the types given in the
positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
- self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
- if 'returnvalue' in kwargs:
- self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
- return self.returnvalue
+ raise TypeError("Invalid argument type '%s' at position %d. " +
+ "Expected one of (%s)" % (
+ type(args[self.position]).__name__, self.position,
+ ", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
|
Raise TypeError instead of returning
|
## Code Before:
class RequiresType(object):
"""
Checks that the first (or position given by the keyword argument 'position'
argument to the function is an instance of one of the types given in the
positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
self.returnvalue = False
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
if 'returnvalue' in kwargs:
self.returnvalue = kwargs['returnvalue']
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
return self.returnvalue
return f(*args, **kwargs)
return wrapped_f
## Instruction:
Raise TypeError instead of returning
## Code After:
class RequiresType(object):
"""
Checks that the first (or position given by the keyword argument 'position'
argument to the function is an instance of one of the types given in the
positional decorator arguments
"""
def __init__(self, *types, **kwargs):
self.types = types
self.position = 0
if 'position' in kwargs:
self.position = int(kwargs['position']) - 1
def __call__(self, f):
def wrapped_f(*args, **kwargs):
if type(args[self.position]) not in self.types:
raise TypeError("Invalid argument type '%s' at position %d. " +
"Expected one of (%s)" % (
type(args[self.position]).__name__, self.position,
", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
return wrapped_f
|
# ... existing code ...
self.position = 0
if 'position' in kwargs:
# ... modified code ...
self.position = int(kwargs['position']) - 1
...
if type(args[self.position]) not in self.types:
raise TypeError("Invalid argument type '%s' at position %d. " +
"Expected one of (%s)" % (
type(args[self.position]).__name__, self.position,
", ".join([t.__name__ for t in self.types])))
return f(*args, **kwargs)
# ... rest of the code ...
|
bad8133c6714a25ad764419302f4db0da3f39952
|
spec_cleaner/rpminstall.py
|
spec_cleaner/rpminstall.py
|
import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = string.replace(line, '%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
|
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = line.replace('%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
|
Fix test failures on py3.
|
Fix test failures on py3.
|
Python
|
bsd-3-clause
|
plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,pombredanne/spec-cleaner,plusky/spec-cleaner
|
-
- import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
- line = string.replace(line, '%{makeinstall}', install_command)
+ line = line.replace('%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
|
Fix test failures on py3.
|
## Code Before:
import string
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = string.replace(line, '%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
## Instruction:
Fix test failures on py3.
## Code After:
from rpmsection import Section
class RpmInstall(Section):
'''
Remove commands that wipe out the build root.
Replace %makeinstall (suse-ism).
'''
def add(self, line):
install_command = 'make DESTDIR=%{buildroot} install %{?_smp_mflags}'
line = self._complete_cleanup(line)
line = self._replace_remove_la(line)
# we do not want to cleanup buildroot, it is already clean
if self.reg.re_clean.search(line):
return
# do not use install macros as we have trouble with it for now
# we can convert it later on
if self.reg.re_install.match(line):
line = install_command
# we can deal with additional params for %makeinstall so replace that too
line = line.replace('%{makeinstall}', install_command)
Section.add(self, line)
def _replace_remove_la(self, line):
"""
Replace all known variations of la file deletion with one unified
"""
if (self.reg.re_rm.search(line) and len(self.reg.re_rm_double.split(line)) == 1) or \
(self.reg.re_find.search(line) and len(self.reg.re_find_double.split(line)) == 2):
line = 'find %{buildroot} -type f -name "*.la" -delete -print'
return line
|
// ... existing code ...
// ... modified code ...
# we can deal with additional params for %makeinstall so replace that too
line = line.replace('%{makeinstall}', install_command)
// ... rest of the code ...
|
3c3e9b5f584c23c9359ca9dce71b89635fffd043
|
LiSE/LiSE/tests/test_load.py
|
LiSE/LiSE/tests/test_load.py
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng, kobold_pos=(9, 9))
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
eng.character['physical'].thing['kobold']['location'] = (3, 3)
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
!= eng._things_cache.keyframe['physical']['trunk'][1][tick1]
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
!= eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
|
Make test_multi_keyframe demonstrate what it's supposed to
|
Make test_multi_keyframe demonstrate what it's supposed to
I was testing a cache that wasn't behaving correctly for
unrelated reasons.
|
Python
|
agpl-3.0
|
LogicalDash/LiSE,LogicalDash/LiSE
|
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
- inittest(eng, kobold_pos=(9, 9))
+ inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
- eng.character['physical'].thing['kobold']['location'] = (3, 3)
+ del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
- assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
+ assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
- != eng._things_cache.keyframe['physical']['trunk'][1][tick1]
+ != eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
|
Make test_multi_keyframe demonstrate what it's supposed to
|
## Code Before:
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng, kobold_pos=(9, 9))
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
eng.character['physical'].thing['kobold']['location'] = (3, 3)
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._things_cache.keyframe['physical']['trunk'][0][tick0]\
!= eng._things_cache.keyframe['physical']['trunk'][1][tick1]
## Instruction:
Make test_multi_keyframe demonstrate what it's supposed to
## Code After:
import os
import shutil
import pytest
from LiSE.engine import Engine
from LiSE.examples.kobold import inittest
def test_keyframe_load_init(tempdir):
"""Can load a keyframe at start of branch, including locations"""
eng = Engine(tempdir)
inittest(eng)
eng.branch = 'new'
eng.snap_keyframe()
eng.close()
eng = Engine(tempdir)
assert 'kobold' in eng.character['physical'].thing
assert (0, 0) in eng.character['physical'].place
assert (0, 1) in eng.character['physical'].portal[0, 0]
eng.close()
def test_multi_keyframe(tempdir):
eng = Engine(tempdir)
inittest(eng)
eng.snap_keyframe()
tick0 = eng.tick
eng.turn = 1
del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
tick1 = eng.tick
eng.close()
eng = Engine(tempdir)
eng._load_at('trunk', 0, tick0+1)
assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
!= eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
|
# ... existing code ...
eng = Engine(tempdir)
inittest(eng)
eng.snap_keyframe()
# ... modified code ...
eng.turn = 1
del eng.character['physical'].place[3, 3]
eng.snap_keyframe()
...
eng._load_at('trunk', 0, tick0+1)
assert eng._nodes_cache.keyframe['physical', ]['trunk'][0][tick0]\
!= eng._nodes_cache.keyframe['physical', ]['trunk'][1][tick1]
# ... rest of the code ...
|
733404ba2eb7218bb4d253cd74fe88107ff75afc
|
test/test_live_openid_login.py
|
test/test_live_openid_login.py
|
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login():
"""
Tests login to the Stack Exchange OpenID provider.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
# This will raise an error if login fails.
browser.loginSEOpenID(
live_testing.username,
live_testing.password)
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
|
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
|
Remove successful OpenID login live test. It's redundant with our message-related live tests.
|
Remove successful OpenID login live test.
It's redundant with our message-related live tests.
|
Python
|
apache-2.0
|
ByteCommander/ChatExchange6,hichris1234/ChatExchange,Charcoal-SE/ChatExchange,hichris1234/ChatExchange,ByteCommander/ChatExchange6,Charcoal-SE/ChatExchange
|
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
- def test_openid_login():
- """
- Tests login to the Stack Exchange OpenID provider.
- """
- browser = SEChatBrowser()
-
- # avoid hitting the SE servers too frequently
- time.sleep(2)
-
- # This will raise an error if login fails.
- browser.loginSEOpenID(
- live_testing.username,
- live_testing.password)
-
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
|
Remove successful OpenID login live test. It's redundant with our message-related live tests.
|
## Code Before:
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login():
"""
Tests login to the Stack Exchange OpenID provider.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
# This will raise an error if login fails.
browser.loginSEOpenID(
live_testing.username,
live_testing.password)
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
## Instruction:
Remove successful OpenID login live test. It's redundant with our message-related live tests.
## Code After:
import time
import pytest
from chatexchange.browser import SEChatBrowser, LoginError
import live_testing
if live_testing.enabled:
def test_openid_login_recognizes_failure():
"""
Tests that failed SE OpenID logins raise errors.
"""
browser = SEChatBrowser()
# avoid hitting the SE servers too frequently
time.sleep(2)
with pytest.raises(LoginError):
invalid_password = 'no' + 't' * len(live_testing.password)
browser.loginSEOpenID(
live_testing.username,
invalid_password)
|
// ... existing code ...
if live_testing.enabled:
def test_openid_login_recognizes_failure():
// ... rest of the code ...
|
6d97b723915e5de7a008e5d7bdd44e7883967fdc
|
retdec/tools/__init__.py
|
retdec/tools/__init__.py
|
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
|
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
|
Simplify the help message for the -k/--api-key parameter.
|
Simplify the help message for the -k/--api-key parameter.
We can use the '%(default)s' placeholder instead of string formatting.
|
Python
|
mit
|
s3rvac/retdec-python
|
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
- help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
+ help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
|
Simplify the help message for the -k/--api-key parameter.
|
## Code Before:
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: {}.'.format(DEFAULT_API_URL)
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
## Instruction:
Simplify the help message for the -k/--api-key parameter.
## Code After:
"""Tools that use the library to analyze and decompile files."""
from retdec import DEFAULT_API_URL
from retdec import __version__
def _add_arguments_shared_by_all_tools(parser):
"""Adds arguments that are used by all tools to the given parser."""
parser.add_argument(
'-k', '--api-key',
dest='api_key',
metavar='KEY',
help='API key to be used.'
)
parser.add_argument(
'-u', '--api-url',
dest='api_url',
metavar='URL',
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
parser.add_argument(
'-V', '--version',
action='version',
version='%(prog)s (via retdec-python) {}'.format(__version__)
)
|
# ... existing code ...
default=DEFAULT_API_URL,
help='URL to the API. Default: %(default)s.'
)
# ... rest of the code ...
|
ea902f4002344c1cbf56dbd989c27aa1ad41a363
|
task_run_system.py
|
task_run_system.py
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
Make the run system task more error friendly
|
Make the run system task more error friendly
|
Python
|
bsd-2-clause
|
jskinn/robot-vision-experiment-framework,jskinn/robot-vision-experiment-framework
|
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
+ import core.image_collection
+ import core.image_entity
+ import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
+ system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
+ if s_system is not None:
- system = db_client.deserialize_entity(s_system)
+ system = db_client.deserialize_entity(s_system)
+ del s_system
+ image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
+ if s_image_source is not None:
- image_source = db_client.deserialize_entity(s_image_source)
+ image_source = db_client.deserialize_entity(s_image_source)
+ del s_image_source
+ if system is not None and image_source is not None:
- trial_result = trial_runner.run_system_with_source(system, image_source)
+ trial_result = trial_runner.run_system_with_source(system, image_source)
+ if trial_result is not None:
- db_client.trials_collection.insert(trial_result.serialize())
+ db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
Make the run system task more error friendly
|
## Code Before:
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
s_system = db_client.system_collection.find_one({'_id': system_id})
system = db_client.deserialize_entity(s_system)
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
image_source = db_client.deserialize_entity(s_image_source)
trial_result = trial_runner.run_system_with_source(system, image_source)
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
## Instruction:
Make the run system task more error friendly
## Code After:
import sys
import bson.objectid
import config.global_configuration as global_conf
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
def main(*args):
"""
Run a given system with a given image source.
This represents a basic task.
Scripts to run this will be autogenerated by the job system
The first argument is the system id, the second argument is the image source to use
(note that args[0] should be the x
:return:
"""
if len(args) >= 2:
system_id = bson.objectid.ObjectId(args[0])
image_source_id = bson.objectid.ObjectId(args[1])
config = global_conf.load_global_config('config.yml')
db_client = database.client.DatabaseClient(config=config)
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
if __name__ == '__main__':
main(*sys.argv[1:])
|
...
import database.client
import core.image_collection
import core.image_entity
import systems.deep_learning.keras_frcnn
import batch_analysis.trial_runner as trial_runner
...
system = None
s_system = db_client.system_collection.find_one({'_id': system_id})
if s_system is not None:
system = db_client.deserialize_entity(s_system)
del s_system
image_source = None
s_image_source = db_client.image_source_collection.find_one({'_id': image_source_id})
if s_image_source is not None:
image_source = db_client.deserialize_entity(s_image_source)
del s_image_source
if system is not None and image_source is not None:
trial_result = trial_runner.run_system_with_source(system, image_source)
if trial_result is not None:
db_client.trials_collection.insert(trial_result.serialize())
...
|
3e42af8ac949032d8dc2c4bc181a64fc2fbed651
|
downstream_node/models.py
|
downstream_node/models.py
|
from sqlalchemy import Table
|
from downstream_node.startup import db
class Files(db.Model):
__tablename__ = 'files'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column('filepath', db.String())
class Challenges(db.Model):
__tablename__ = 'challenges'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column(db.ForeignKey('files.filepath'))
block = db.Column('block', db.String())
seed = db.Column('seed', db.String())
response = db.Column('response', db.String(), nullable=True)
|
Add model stuff into DB
|
Add model stuff into DB
|
Python
|
mit
|
Storj/downstream-node,Storj/downstream-node
|
+ from downstream_node.startup import db
- from sqlalchemy import Table
+
+ class Files(db.Model):
+ __tablename__ = 'files'
+
+ id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
+ filepath = db.Column('filepath', db.String())
+
+
+ class Challenges(db.Model):
+ __tablename__ = 'challenges'
+
+ id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
+ filepath = db.Column(db.ForeignKey('files.filepath'))
+ block = db.Column('block', db.String())
+ seed = db.Column('seed', db.String())
+ response = db.Column('response', db.String(), nullable=True)
+
|
Add model stuff into DB
|
## Code Before:
from sqlalchemy import Table
## Instruction:
Add model stuff into DB
## Code After:
from downstream_node.startup import db
class Files(db.Model):
__tablename__ = 'files'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column('filepath', db.String())
class Challenges(db.Model):
__tablename__ = 'challenges'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column(db.ForeignKey('files.filepath'))
block = db.Column('block', db.String())
seed = db.Column('seed', db.String())
response = db.Column('response', db.String(), nullable=True)
|
// ... existing code ...
from downstream_node.startup import db
class Files(db.Model):
__tablename__ = 'files'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column('filepath', db.String())
class Challenges(db.Model):
__tablename__ = 'challenges'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column(db.ForeignKey('files.filepath'))
block = db.Column('block', db.String())
seed = db.Column('seed', db.String())
response = db.Column('response', db.String(), nullable=True)
// ... rest of the code ...
|
28bc35bc8ed2646faf0d6662b54a5324c0fd1e31
|
pspec/cli.py
|
pspec/cli.py
|
from attest.hook import AssertImportHook
from docopt import docopt
import os
import sys
from .collectors import PSpecTests
def main():
arguments = docopt(__doc__)
paths = arguments['<path>']
if not paths:
paths = [name for name in os.listdir('.')
if os.path.isfile('%s/__init__.py' % name)]
with AssertImportHook():
tests = PSpecTests(paths)
tests.run()
if __name__ == '__main__':
main()
|
from attest.hook import AssertImportHook
from docopt import docopt
import os
import sys
from .collectors import PSpecTests
def main():
# When run as a console script (i.e. ``pspec``), the CWD isn't
# ``sys.path[0]``, but it should be.
cwd = os.getcwd()
if sys.path[0] not in ('', cwd):
sys.path.insert(0, cwd)
arguments = docopt(__doc__)
paths = arguments['<path>']
if not paths:
paths = [name for name in os.listdir('.')
if os.path.isfile('%s/__init__.py' % name)]
with AssertImportHook():
tests = PSpecTests(paths)
tests.run()
if __name__ == '__main__':
main()
|
Put CWD at start of sys.path
|
Put CWD at start of sys.path
|
Python
|
bsd-3-clause
|
bfirsh/pspec
|
from attest.hook import AssertImportHook
from docopt import docopt
import os
import sys
from .collectors import PSpecTests
def main():
+ # When run as a console script (i.e. ``pspec``), the CWD isn't
+ # ``sys.path[0]``, but it should be.
+ cwd = os.getcwd()
+ if sys.path[0] not in ('', cwd):
+ sys.path.insert(0, cwd)
+
arguments = docopt(__doc__)
paths = arguments['<path>']
if not paths:
paths = [name for name in os.listdir('.')
if os.path.isfile('%s/__init__.py' % name)]
with AssertImportHook():
tests = PSpecTests(paths)
tests.run()
if __name__ == '__main__':
main()
|
Put CWD at start of sys.path
|
## Code Before:
from attest.hook import AssertImportHook
from docopt import docopt
import os
import sys
from .collectors import PSpecTests
def main():
arguments = docopt(__doc__)
paths = arguments['<path>']
if not paths:
paths = [name for name in os.listdir('.')
if os.path.isfile('%s/__init__.py' % name)]
with AssertImportHook():
tests = PSpecTests(paths)
tests.run()
if __name__ == '__main__':
main()
## Instruction:
Put CWD at start of sys.path
## Code After:
from attest.hook import AssertImportHook
from docopt import docopt
import os
import sys
from .collectors import PSpecTests
def main():
# When run as a console script (i.e. ``pspec``), the CWD isn't
# ``sys.path[0]``, but it should be.
cwd = os.getcwd()
if sys.path[0] not in ('', cwd):
sys.path.insert(0, cwd)
arguments = docopt(__doc__)
paths = arguments['<path>']
if not paths:
paths = [name for name in os.listdir('.')
if os.path.isfile('%s/__init__.py' % name)]
with AssertImportHook():
tests = PSpecTests(paths)
tests.run()
if __name__ == '__main__':
main()
|
...
def main():
# When run as a console script (i.e. ``pspec``), the CWD isn't
# ``sys.path[0]``, but it should be.
cwd = os.getcwd()
if sys.path[0] not in ('', cwd):
sys.path.insert(0, cwd)
arguments = docopt(__doc__)
...
|
7e1ed9cca3e02488d8d189d22e6fca35c0bec108
|
xmantissa/test/test_siteroot.py
|
xmantissa/test/test_siteroot.py
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
Fix broken test - install WebSite before trying to locateChild
|
Fix broken test - install WebSite before trying to locateChild
|
Python
|
mit
|
twisted/mantissa,twisted/mantissa,twisted/mantissa
|
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
+ ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
Fix broken test - install WebSite before trying to locateChild
|
## Code Before:
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
## Instruction:
Fix broken test - install WebSite before trying to locateChild
## Code After:
from twisted.trial import unittest
from axiom.store import Store
from axiom.item import Item
from axiom.attributes import text
from xmantissa.website import PrefixURLMixin, WebSite
from xmantissa.ixmantissa import ISiteRootPlugin
from zope.interface import implements
class Dummy:
def __init__(self, pfx):
self.pfx = pfx
class PrefixTester(Item, PrefixURLMixin):
implements(ISiteRootPlugin)
typeName = 'test_prefix_widget'
schemaVersion = 1
prefixURL = text()
def createResource(self):
return Dummy(self.prefixURL)
class SiteRootTest(unittest.TestCase):
def testPrefixPriorityMath(self):
s = Store()
PrefixTester(store=s,
prefixURL=u"hello").installOn(s)
PrefixTester(store=s,
prefixURL=u"").installOn(s)
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
self.assertEquals(res.pfx, 'hello')
self.assertEquals(segs, ())
res, segs = ws.locateChild(None, ('',))
self.assertEquals(res.pfx, '')
self.assertEquals(segs, ('',))
|
// ... existing code ...
ws = WebSite(store=s)
ws.installOn(s)
res, segs = ws.locateChild(None, ('hello',))
// ... rest of the code ...
|
4f84482803049b40d7b7da26d9d624a6a63b4820
|
core/utils.py
|
core/utils.py
|
from django.utils import timezone
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = '{} hour{}'.format(h, 's' if h > 1 else '')
if m > 0 and precision != 'h':
duration += '{}{} minute{}'.format(
'' if duration == '' else ', ', m, 's' if m > 1 else '')
if s > 0 and precision != 'h' and precision != 'm':
duration += '{}{} second{}'.format(
'' if duration == '' else ', ', s, 's' if s > 1 else '')
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
from django.utils import timezone
from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
Add translation support to `duration_string` utility
|
Add translation support to `duration_string` utility
|
Python
|
bsd-2-clause
|
cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy
|
from django.utils import timezone
+ from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
- duration = '{} hour{}'.format(h, 's' if h > 1 else '')
+ duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
+ 'hours': h
+ }
if m > 0 and precision != 'h':
- duration += '{}{} minute{}'.format(
- '' if duration == '' else ', ', m, 's' if m > 1 else '')
+ if duration != '':
+ duration += ', '
+ duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
+ 'minutes': m
+ }
if s > 0 and precision != 'h' and precision != 'm':
- duration += '{}{} second{}'.format(
- '' if duration == '' else ', ', s, 's' if s > 1 else '')
+ if duration != '':
+ duration += ', '
+ duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
+ 'seconds': s
+ }
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
Add translation support to `duration_string` utility
|
## Code Before:
from django.utils import timezone
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = '{} hour{}'.format(h, 's' if h > 1 else '')
if m > 0 and precision != 'h':
duration += '{}{} minute{}'.format(
'' if duration == '' else ', ', m, 's' if m > 1 else '')
if s > 0 and precision != 'h' and precision != 'm':
duration += '{}{} second{}'.format(
'' if duration == '' else ', ', s, 's' if s > 1 else '')
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
## Instruction:
Add translation support to `duration_string` utility
## Code After:
from django.utils import timezone
from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
// ... existing code ...
from django.utils import timezone
from django.utils.translation import ngettext
// ... modified code ...
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
// ... rest of the code ...
|
bc8e064e41d43a4579c8111f1480b55e660ca186
|
pep8ify/fixes/fix_tabs.py
|
pep8ify/fixes/fix_tabs.py
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
SPACES = ' ' * 4
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
Use globally defined number of spaces.
|
Clean-up: Use globally defined number of spaces.
|
Python
|
apache-2.0
|
spulec/pep8ify
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
- SPACES = ' ' * 4
+ from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
Use globally defined number of spaces.
|
## Code Before:
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
SPACES = ' ' * 4
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
## Instruction:
Use globally defined number of spaces.
## Code After:
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
from .utils import SPACES
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
# ... existing code ...
from .utils import SPACES
# ... rest of the code ...
|
b19951bcf2035c9e755ad731e4f5081cf5f0d46f
|
troposphere/codeguruprofiler.py
|
troposphere/codeguruprofiler.py
|
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
|
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
|
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
+ 'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
|
## Code Before:
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
## Instruction:
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
## Code After:
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
# ... existing code ...
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
# ... rest of the code ...
|
9c48cd08ee0805cfd9a8115d77da139e8c09d7a9
|
plyer/platforms/linux/cpu.py
|
plyer/platforms/linux/cpu.py
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
Add CPU count for GNU/Linux
|
Add CPU count for GNU/Linux
|
Python
|
mit
|
kivy/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer
|
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
+ physical = [] # list of CPU ids from kernel
+ # open Linux kernel data file for CPU
+ with open('/proc/cpuinfo', 'rb') as fle:
+ lines = fle.readlines()
+ # go through the lines and obtain CPU core ids
+ for line in lines:
+ line = line.decode('utf-8')
+ if 'core id' not in line:
+ continue
+ cpuid = line.split(':')[1].strip()
+ physical.append(cpuid)
+ # total cores (socket * core per socket)
+ # is the length of unique CPU ids from kernel
+ physical = len(set(physical))
+ cpus['physical'] = physical
+
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
- environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
+
+ environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
Add CPU count for GNU/Linux
|
## Code Before:
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
environ['LANG'] = old_lang
if output:
cpus['logical'] = int(output)
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
## Instruction:
Add CPU count for GNU/Linux
## Code After:
from subprocess import Popen, PIPE
from plyer.facades import CPU
from plyer.utils import whereis_exe
from os import environ
class LinuxProcessors(CPU):
def _cpus(self):
old_lang = environ.get('LANG', '')
environ['LANG'] = 'C'
cpus = {
'physical': None, # cores
'logical': None # cores * threads
}
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
['nproc', '--all'],
stdout=PIPE
)
output = logical.communicate()[0].decode('utf-8').strip()
if output:
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
def instance():
import sys
if whereis_exe('nproc'):
return LinuxProcessors()
sys.stderr.write("nproc not found.")
return CPU()
|
# ... existing code ...
physical = [] # list of CPU ids from kernel
# open Linux kernel data file for CPU
with open('/proc/cpuinfo', 'rb') as fle:
lines = fle.readlines()
# go through the lines and obtain CPU core ids
for line in lines:
line = line.decode('utf-8')
if 'core id' not in line:
continue
cpuid = line.split(':')[1].strip()
physical.append(cpuid)
# total cores (socket * core per socket)
# is the length of unique CPU ids from kernel
physical = len(set(physical))
cpus['physical'] = physical
logical = Popen(
# ... modified code ...
if output:
...
cpus['logical'] = int(output)
environ['LANG'] = old_lang
return cpus
# ... rest of the code ...
|
fba983fa54691fcde0de93d6519b3906dff3cb32
|
sara_flexbe_states/src/sara_flexbe_states/get_distance2D.py
|
sara_flexbe_states/src/sara_flexbe_states/get_distance2D.py
|
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
|
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
|
Correct call to super constructor
|
Correct call to super constructor
|
Python
|
bsd-3-clause
|
WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors
|
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
- super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
+ super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
|
Correct call to super constructor
|
## Code Before:
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
## Instruction:
Correct call to super constructor
## Code After:
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Calcule la distance entre deux points donnes.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
|
...
super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
...
|
b33b063e49b394265bc890f6d3b39da08e355416
|
blogs/tests/test_parser.py
|
blogs/tests/test_parser.py
|
from unittest import TestCase
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(TestCase):
def setUp(self):
self.test_file_path = get_test_rss_path()
self.entries = get_all_entries("file://{}".format(self.test_file_path))
def test_entries(self):
""" Make sure we can parse RSS entries """
self.assertEqual(len(self.entries), 25)
|
import datetime
import unittest
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_file_path = get_test_rss_path()
cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
def test_entries(self):
self.assertEqual(len(self.entries), 25)
self.assertEqual(
self.entries[0]['title'],
'Introducing Electronic Contributor Agreements'
)
self.assertIn(
"We're happy to announce the new way to file a contributor "
"agreement: on the web at",
self.entries[0]['summary']
)
self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
self.assertEqual(
self.entries[0]['url'],
'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
)
|
Add some tests to make sure we can parse RSS feeds
|
Add some tests to make sure we can parse RSS feeds
|
Python
|
apache-2.0
|
manhhomienbienthuy/pythondotorg,proevo/pythondotorg,manhhomienbienthuy/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg,python/pythondotorg,manhhomienbienthuy/pythondotorg,python/pythondotorg,Mariatta/pythondotorg,manhhomienbienthuy/pythondotorg,Mariatta/pythondotorg,python/pythondotorg,proevo/pythondotorg,python/pythondotorg
|
- from unittest import TestCase
+ import datetime
+ import unittest
from ..parser import get_all_entries
from .utils import get_test_rss_path
- class BlogParserTest(TestCase):
+ class BlogParserTest(unittest.TestCase):
+ @classmethod
- def setUp(self):
+ def setUpClass(cls):
- self.test_file_path = get_test_rss_path()
+ cls.test_file_path = get_test_rss_path()
- self.entries = get_all_entries("file://{}".format(self.test_file_path))
+ cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
def test_entries(self):
- """ Make sure we can parse RSS entries """
self.assertEqual(len(self.entries), 25)
+ self.assertEqual(
+ self.entries[0]['title'],
+ 'Introducing Electronic Contributor Agreements'
+ )
+ self.assertIn(
+ "We're happy to announce the new way to file a contributor "
+ "agreement: on the web at",
+ self.entries[0]['summary']
+ )
+ self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
+ self.assertEqual(
+ self.entries[0]['url'],
+ 'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
+ )
|
Add some tests to make sure we can parse RSS feeds
|
## Code Before:
from unittest import TestCase
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(TestCase):
def setUp(self):
self.test_file_path = get_test_rss_path()
self.entries = get_all_entries("file://{}".format(self.test_file_path))
def test_entries(self):
""" Make sure we can parse RSS entries """
self.assertEqual(len(self.entries), 25)
## Instruction:
Add some tests to make sure we can parse RSS feeds
## Code After:
import datetime
import unittest
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_file_path = get_test_rss_path()
cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
def test_entries(self):
self.assertEqual(len(self.entries), 25)
self.assertEqual(
self.entries[0]['title'],
'Introducing Electronic Contributor Agreements'
)
self.assertIn(
"We're happy to announce the new way to file a contributor "
"agreement: on the web at",
self.entries[0]['summary']
)
self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
self.assertEqual(
self.entries[0]['url'],
'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
)
|
// ... existing code ...
import datetime
import unittest
// ... modified code ...
class BlogParserTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_file_path = get_test_rss_path()
cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
...
def test_entries(self):
self.assertEqual(len(self.entries), 25)
self.assertEqual(
self.entries[0]['title'],
'Introducing Electronic Contributor Agreements'
)
self.assertIn(
"We're happy to announce the new way to file a contributor "
"agreement: on the web at",
self.entries[0]['summary']
)
self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
self.assertEqual(
self.entries[0]['url'],
'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
)
// ... rest of the code ...
|
cef6f3cce4a942bea53d6bae639dcd48d680d05a
|
gpytorch/means/linear_mean.py
|
gpytorch/means/linear_mean.py
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
Fix LinearMean bias when bias=False
|
Fix LinearMean bias when bias=False
|
Python
|
mit
|
jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
+ else:
+ self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
Fix LinearMean bias when bias=False
|
## Code Before:
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
## Instruction:
Fix LinearMean bias when bias=False
## Code After:
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
// ... existing code ...
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
// ... rest of the code ...
|
411decbdb193b28bb3060e02e81bfa29483e85a9
|
staticgen_demo/blog/staticgen_views.py
|
staticgen_demo/blog/staticgen_views.py
|
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
Remove debug code from staticgen views.
|
Remove debug code from staticgen views.
|
Python
|
bsd-3-clause
|
mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo
|
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
- def _get_paginator(self, url):
- response = self.client.get(url)
- print 'status_code: %s' % response.status_code
- if not response.status_code == 200:
- pass
- else:
- context = {}
- if hasattr(response, 'context_data'):
- context = response.context_data
- elif hasattr(response, 'context'):
- context = response.context
-
- print context
- try:
- return context['paginator'], context['is_paginated']
- except KeyError:
- pass
- return None, False
-
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
Remove debug code from staticgen views.
|
## Code Before:
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
def _get_paginator(self, url):
response = self.client.get(url)
print 'status_code: %s' % response.status_code
if not response.status_code == 200:
pass
else:
context = {}
if hasattr(response, 'context_data'):
context = response.context_data
elif hasattr(response, 'context'):
context = response.context
print context
try:
return context['paginator'], context['is_paginated']
except KeyError:
pass
return None, False
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
## Instruction:
Remove debug code from staticgen views.
## Code After:
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
from .models import Post
class BlogPostListView(StaticgenView):
is_paginated = True
i18n = True
def items(self):
return ('blog:posts_list', )
class BlogPostDetailView(StaticgenView):
i18n = True
def items(self):
return Post.objects.all()
staticgen_pool.register(BlogPostListView)
staticgen_pool.register(BlogPostDetailView)
|
// ... existing code ...
// ... rest of the code ...
|
c2364bfe321bc19ab2d648fc77c8111522654237
|
adhocracy/migration/versions/032_remove_comment_title.py
|
adhocracy/migration/versions/032_remove_comment_title.py
|
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
old_revision_table = Table('revision', metadata,
Column('id', Integer, primary_key=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('text', UnicodeText(), nullable=False),
Column('sentiment', Integer, default=0),
Column('user_id', Integer, ForeignKey('user.id'), nullable=False),
Column('comment_id', Integer, ForeignKey('comment.id'), nullable=False),
Column('title', Unicode(255), nullable=True)
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
revisions_table = Table('revision', metadata, autoload=True)
q = migrate_engine.execute(revisions_table.select())
for (id, _, text, _, _, _, title) in q:
title = title and title.strip() or ''
if len(title) < 5:
continue
if title.startswith('Re: '):
continue
new_text = ('**%(title)s**\n'
'\n'
'%(text)s') % {'title': title,
'text': text}
update_statement = revisions_table.update(
revisions_table.c.id == id, {'text': new_text})
migrate_engine.execute(update_statement)
revisions_table.c.title.drop()
raise Exception('ksjdfkl')
def downgrade(migrate_engine):
raise NotImplementedError()
|
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
old_revision_table = Table('revision', metadata,
Column('id', Integer, primary_key=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('text', UnicodeText(), nullable=False),
Column('sentiment', Integer, default=0),
Column('user_id', Integer, ForeignKey('user.id'), nullable=False),
Column('comment_id', Integer, ForeignKey('comment.id'), nullable=False),
Column('title', Unicode(255), nullable=True)
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
revisions_table = Table('revision', metadata, autoload=True)
q = migrate_engine.execute(revisions_table.select())
for (id, _, text, _, _, _, title) in q:
title = title and title.strip() or ''
if len(title) < 5:
continue
if title.startswith('Re: '):
continue
new_text = ('**%(title)s**\n'
'\n'
'%(text)s') % {'title': title,
'text': text}
update_statement = revisions_table.update(
revisions_table.c.id == id, {'text': new_text})
migrate_engine.execute(update_statement)
revisions_table.c.title.drop()
def downgrade(migrate_engine):
raise NotImplementedError()
|
Remove silly exception inserted for testing
|
Remove silly exception inserted for testing
|
Python
|
agpl-3.0
|
SysTheron/adhocracy,DanielNeugebauer/adhocracy,SysTheron/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,SysTheron/adhocracy,alkadis/vcv,liqd/adhocracy,alkadis/vcv,phihag/adhocracy,liqd/adhocracy,alkadis/vcv,liqd/adhocracy,phihag/adhocracy,liqd/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,phihag/adhocracy
|
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
old_revision_table = Table('revision', metadata,
Column('id', Integer, primary_key=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('text', UnicodeText(), nullable=False),
Column('sentiment', Integer, default=0),
Column('user_id', Integer, ForeignKey('user.id'), nullable=False),
Column('comment_id', Integer, ForeignKey('comment.id'), nullable=False),
Column('title', Unicode(255), nullable=True)
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
revisions_table = Table('revision', metadata, autoload=True)
q = migrate_engine.execute(revisions_table.select())
for (id, _, text, _, _, _, title) in q:
title = title and title.strip() or ''
if len(title) < 5:
continue
if title.startswith('Re: '):
continue
new_text = ('**%(title)s**\n'
'\n'
'%(text)s') % {'title': title,
'text': text}
update_statement = revisions_table.update(
revisions_table.c.id == id, {'text': new_text})
migrate_engine.execute(update_statement)
revisions_table.c.title.drop()
- raise Exception('ksjdfkl')
def downgrade(migrate_engine):
raise NotImplementedError()
|
Remove silly exception inserted for testing
|
## Code Before:
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
old_revision_table = Table('revision', metadata,
Column('id', Integer, primary_key=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('text', UnicodeText(), nullable=False),
Column('sentiment', Integer, default=0),
Column('user_id', Integer, ForeignKey('user.id'), nullable=False),
Column('comment_id', Integer, ForeignKey('comment.id'), nullable=False),
Column('title', Unicode(255), nullable=True)
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
revisions_table = Table('revision', metadata, autoload=True)
q = migrate_engine.execute(revisions_table.select())
for (id, _, text, _, _, _, title) in q:
title = title and title.strip() or ''
if len(title) < 5:
continue
if title.startswith('Re: '):
continue
new_text = ('**%(title)s**\n'
'\n'
'%(text)s') % {'title': title,
'text': text}
update_statement = revisions_table.update(
revisions_table.c.id == id, {'text': new_text})
migrate_engine.execute(update_statement)
revisions_table.c.title.drop()
raise Exception('ksjdfkl')
def downgrade(migrate_engine):
raise NotImplementedError()
## Instruction:
Remove silly exception inserted for testing
## Code After:
from datetime import datetime
from sqlalchemy import MetaData, Column, ForeignKey, Table
from sqlalchemy import DateTime, Integer, Unicode, UnicodeText
metadata = MetaData()
old_revision_table = Table('revision', metadata,
Column('id', Integer, primary_key=True),
Column('create_time', DateTime, default=datetime.utcnow),
Column('text', UnicodeText(), nullable=False),
Column('sentiment', Integer, default=0),
Column('user_id', Integer, ForeignKey('user.id'), nullable=False),
Column('comment_id', Integer, ForeignKey('comment.id'), nullable=False),
Column('title', Unicode(255), nullable=True)
)
def upgrade(migrate_engine):
metadata.bind = migrate_engine
revisions_table = Table('revision', metadata, autoload=True)
q = migrate_engine.execute(revisions_table.select())
for (id, _, text, _, _, _, title) in q:
title = title and title.strip() or ''
if len(title) < 5:
continue
if title.startswith('Re: '):
continue
new_text = ('**%(title)s**\n'
'\n'
'%(text)s') % {'title': title,
'text': text}
update_statement = revisions_table.update(
revisions_table.c.id == id, {'text': new_text})
migrate_engine.execute(update_statement)
revisions_table.c.title.drop()
def downgrade(migrate_engine):
raise NotImplementedError()
|
# ... existing code ...
revisions_table.c.title.drop()
# ... rest of the code ...
|
3039b00e761f02eb0586dad51049377a31329491
|
reggae/reflect.py
|
reggae/reflect.py
|
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [m.__file__ for m in finder.modules.values()]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [os.path.abspath(m.__file__) for
m in finder.modules.values() if m.__file__ is not None]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
Use absolute paths for dependencies
|
Use absolute paths for dependencies
|
Python
|
bsd-3-clause
|
atilaneves/reggae-python
|
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
- all_module_paths = [m.__file__ for m in finder.modules.values()]
+ all_module_paths = [os.path.abspath(m.__file__) for
+ m in finder.modules.values() if m.__file__ is not None]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
Use absolute paths for dependencies
|
## Code Before:
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [m.__file__ for m in finder.modules.values()]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
## Instruction:
Use absolute paths for dependencies
## Code After:
from __future__ import (unicode_literals, division,
absolute_import, print_function)
from reggae.build import Build, DefaultOptions
from inspect import getmembers
def get_build(module):
builds = [v for n, v in getmembers(module) if isinstance(v, Build)]
assert len(builds) == 1
return builds[0]
def get_default_options(module):
opts = [v for n, v in getmembers(module) if isinstance(v, DefaultOptions)]
assert len(opts) == 1 or len(opts) == 0
return opts[0] if len(opts) else None
def get_dependencies(module):
from modulefinder import ModuleFinder
import os
finder = ModuleFinder()
finder.run_script(module)
all_module_paths = [os.path.abspath(m.__file__) for
m in finder.modules.values() if m.__file__ is not None]
def is_in_same_path(p):
return p and os.path.dirname(p).startswith(os.path.dirname(module))
return [x for x in all_module_paths if is_in_same_path(x) and x != module]
|
// ... existing code ...
finder.run_script(module)
all_module_paths = [os.path.abspath(m.__file__) for
m in finder.modules.values() if m.__file__ is not None]
// ... rest of the code ...
|
b2b1c2b8543cae37990262b2a811a9b0f26327da
|
arm/utils/locker.py
|
arm/utils/locker.py
|
from kvs import CacheKvs
class Locker(object):
"""
locker for move the locker
"""
LOCKER_KEY = 'locker'
EXPIRES = 5 # 5 sec
def __init__(self, key=None):
self.key = self.LOCKER_KEY
if key:
self.key += '.{}'.format(key)
self.locker = CacheKvs(self.key)
def lock(self):
self.locker.set('locked', expires=self.EXPIRES, nx=True)
def unlock(self):
self.locker.delete()
def is_lock(self):
return self.locker.get() == 'locked'
def on_lock(self, func):
def wrapper(*args, **kwargs):
if self.is_lock():
return
self.lock()
try:
return func(*args, **kwargs)
except Exception as e:
raise e
finally:
self.unlock()
return wrapper
|
from kvs import CacheKvs
class Locker(object):
"""
locker for move the locker
"""
LOCKER_KEY = 'locker'
EXPIRES = 5 # 5 sec
def __init__(self, key=None):
self.key = self.LOCKER_KEY
if key:
self.key += '.{}'.format(key)
self.locker = CacheKvs(self.key)
def lock(self):
self.locker.set('locked', expires=self.EXPIRES, nx=True)
def unlock(self):
self.locker.delete()
def is_lock(self):
return self.locker.get() == 'locked'
def on_lock(self, func):
def wrapper(*args, **kwargs):
if self.lock():
try:
return func(*args, **kwargs)
except Exception as e:
raise e
finally:
self.unlock()
return wrapper
|
Fix redis lock, use SETNX
|
Fix redis lock, use SETNX
|
Python
|
mit
|
mapler/tuesday,mapler/tuesday,mapler/tuesday
|
from kvs import CacheKvs
class Locker(object):
"""
locker for move the locker
"""
LOCKER_KEY = 'locker'
EXPIRES = 5 # 5 sec
def __init__(self, key=None):
self.key = self.LOCKER_KEY
if key:
self.key += '.{}'.format(key)
self.locker = CacheKvs(self.key)
def lock(self):
self.locker.set('locked', expires=self.EXPIRES, nx=True)
def unlock(self):
self.locker.delete()
def is_lock(self):
return self.locker.get() == 'locked'
def on_lock(self, func):
def wrapper(*args, **kwargs):
- if self.is_lock():
+ if self.lock():
- return
- self.lock()
- try:
+ try:
- return func(*args, **kwargs)
+ return func(*args, **kwargs)
- except Exception as e:
+ except Exception as e:
- raise e
+ raise e
- finally:
+ finally:
- self.unlock()
+ self.unlock()
return wrapper
|
Fix redis lock, use SETNX
|
## Code Before:
from kvs import CacheKvs
class Locker(object):
"""
locker for move the locker
"""
LOCKER_KEY = 'locker'
EXPIRES = 5 # 5 sec
def __init__(self, key=None):
self.key = self.LOCKER_KEY
if key:
self.key += '.{}'.format(key)
self.locker = CacheKvs(self.key)
def lock(self):
self.locker.set('locked', expires=self.EXPIRES, nx=True)
def unlock(self):
self.locker.delete()
def is_lock(self):
return self.locker.get() == 'locked'
def on_lock(self, func):
def wrapper(*args, **kwargs):
if self.is_lock():
return
self.lock()
try:
return func(*args, **kwargs)
except Exception as e:
raise e
finally:
self.unlock()
return wrapper
## Instruction:
Fix redis lock, use SETNX
## Code After:
from kvs import CacheKvs
class Locker(object):
"""
locker for move the locker
"""
LOCKER_KEY = 'locker'
EXPIRES = 5 # 5 sec
def __init__(self, key=None):
self.key = self.LOCKER_KEY
if key:
self.key += '.{}'.format(key)
self.locker = CacheKvs(self.key)
def lock(self):
self.locker.set('locked', expires=self.EXPIRES, nx=True)
def unlock(self):
self.locker.delete()
def is_lock(self):
return self.locker.get() == 'locked'
def on_lock(self, func):
def wrapper(*args, **kwargs):
if self.lock():
try:
return func(*args, **kwargs)
except Exception as e:
raise e
finally:
self.unlock()
return wrapper
|
// ... existing code ...
def wrapper(*args, **kwargs):
if self.lock():
try:
return func(*args, **kwargs)
except Exception as e:
raise e
finally:
self.unlock()
return wrapper
// ... rest of the code ...
|
2d9d3e5a0a904a52e8b97bdb64e59f455d15b6e8
|
migrations/versions/1815829d365_.py
|
migrations/versions/1815829d365_.py
|
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
Add geometry_application_reference to new unique index.
|
Add geometry_application_reference to new unique index.
|
Python
|
mit
|
LandRegistry/system-of-record,LandRegistry/system-of-record
|
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
- # create new unique index to include geo app ref
+ # create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
Add geometry_application_reference to new unique index.
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geo app ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
## Instruction:
Add geometry_application_reference to new unique index.
## Code After:
# revision identifiers, used by Alembic.
revision = '1815829d365'
down_revision = '3fcddd64a72'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
op.execute("CREATE UNIQUE INDEX title_abr_geo_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'), (record->'data'->>'geometry_application_reference'))")
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.execute("DROP INDEX title_abr_geo_idx")
op.execute("CREATE UNIQUE INDEX title_abr_idx ON records((record->'data'->>'title_number'),(record->'data'->>'application_reference'))")
### end Alembic commands ###
|
...
### commands auto generated by Alembic - please adjust! ###
# create new unique index to include geometry_application_ref
op.execute("DROP INDEX title_abr_idx")
...
|
09506e7ae8dbc1ad06b35c075e15946dd2c6092b
|
examples/my_test_suite.py
|
examples/my_test_suite.py
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
Update the example test suite
|
Update the example test suite
|
Python
|
mit
|
possoumous/Watchers,mdmintz/SeleniumBase,possoumous/Watchers,seleniumbase/SeleniumBase,mdmintz/seleniumspot,ktp420/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,ktp420/SeleniumBase,ktp420/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,possoumous/Watchers,possoumous/Watchers,mdmintz/SeleniumBase
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
+ self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
Update the example test suite
|
## Code Before:
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
## Instruction:
Update the example test suite
## Code After:
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
// ... existing code ...
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
// ... rest of the code ...
|
c6d50c3feed444f8f450c5c140e8470c6897f2bf
|
societies/models.py
|
societies/models.py
|
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
|
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return self.name
def __repr__(self):
return 'GuitarSociety("{}")'.format(self.name)
|
Make the Guitar Society __str__ Method a bit more Logical
|
Make the Guitar Society __str__ Method a bit more Logical
|
Python
|
bsd-3-clause
|
chrisguitarguy/GuitarSocieties.org,chrisguitarguy/GuitarSocieties.org
|
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
- return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
+ return self.name
+ def __repr__(self):
+ return 'GuitarSociety("{}")'.format(self.name)
+
|
Make the Guitar Society __str__ Method a bit more Logical
|
## Code Before:
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
## Instruction:
Make the Guitar Society __str__ Method a bit more Logical
## Code After:
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return self.name
def __repr__(self):
return 'GuitarSociety("{}")'.format(self.name)
|
...
def __str__(self):
return self.name
def __repr__(self):
return 'GuitarSociety("{}")'.format(self.name)
...
|
34812fe2deec64229efd4119640f3c2ddf0ed415
|
visualize.py
|
visualize.py
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
Make the spring layout a bit easier to look at
|
Make the spring layout a bit easier to look at
|
Python
|
mit
|
jacksontj/dnms,jacksontj/dnms
|
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
- nodes = {}
-
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
- nx.draw_networkx(g, with_labels=True)
-
- # add labels
- #nx.draw_networkx_labels(g, pos, labels['nodes'])
- #nx.draw_networkx_edge_labels(g, pos, labels['edges'])
+ pos = nx.drawing.spring_layout(
+ g,
+ scale=10.0,
+ )
+ nx.draw_networkx(
+ g,
+ pos=pos,
+ with_labels=True,
+ font_size=8,
+ )
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
Make the spring layout a bit easier to look at
|
## Code Before:
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
nodes = {}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
nx.draw_networkx(g, with_labels=True)
# add labels
#nx.draw_networkx_labels(g, pos, labels['nodes'])
#nx.draw_networkx_edge_labels(g, pos, labels['edges'])
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
## Instruction:
Make the spring layout a bit easier to look at
## Code After:
'''
Create a visual representation of the various DAGs defined
'''
import sys
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
g = nx.DiGraph()
labels = {
'edges': {},
'nodes': {},
}
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
for i, node in enumerate(routeMap['Path']):
g.add_node(node['Name'])
labels['nodes'][node['Name']] = node['Name']
if i - 1 >= 0:
g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
# write out the graph
plt.savefig(
'topology.png',
dpi=400.0,
)
plt.show() # in case people have the required libraries to make it happen
|
// ... existing code ...
for routeKey, routeMap in requests.get(sys.argv[1]).json().iteritems():
// ... modified code ...
pos = nx.drawing.spring_layout(
g,
scale=10.0,
)
nx.draw_networkx(
g,
pos=pos,
with_labels=True,
font_size=8,
)
// ... rest of the code ...
|
d37f91f50dd6c0c3202258daca95ee6ee111688f
|
pyjswidgets/pyjamas/ui/Focus.oldmoz.py
|
pyjswidgets/pyjamas/ui/Focus.oldmoz.py
|
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
ensureFocusHandler()
return createFocusable0()
|
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
return createFocusable0(ensureFocusHandler())
|
Fix for IE 11 (Focus)
|
Fix for IE 11 (Focus)
IE presents itself as mozilla, so it trips on the bug.
|
Python
|
apache-2.0
|
gpitel/pyjs,spaceone/pyjs,lancezlin/pyjs,spaceone/pyjs,lancezlin/pyjs,pombredanne/pyjs,pyjs/pyjs,Hasimir/pyjs,gpitel/pyjs,pyjs/pyjs,spaceone/pyjs,pyjs/pyjs,Hasimir/pyjs,lancezlin/pyjs,pyjs/pyjs,pombredanne/pyjs,gpitel/pyjs,Hasimir/pyjs,gpitel/pyjs,spaceone/pyjs,pombredanne/pyjs,lancezlin/pyjs,pombredanne/pyjs,Hasimir/pyjs
|
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
+ return createFocusable0(ensureFocusHandler())
- ensureFocusHandler()
- return createFocusable0()
-
|
Fix for IE 11 (Focus)
|
## Code Before:
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
ensureFocusHandler()
return createFocusable0()
## Instruction:
Fix for IE 11 (Focus)
## Code After:
def ensureFocusHandler():
JS("""
return (focusHandler !== null) ? focusHandler : (focusHandler =
@{{createFocusHandler}}());
""")
def createFocusHandler():
JS("""
return function(evt) {
// This function is called directly as an event handler, so 'this' is
// set up by the browser to be the input on which the event is fired. We
// call focus() in a timeout or the element may be blurred when this event
// ends.
var div = this['parentNode'];
if (div['onfocus']) {
$wnd['setTimeout'](function() {
div['focus']();
}, 0);
}
};
""")
def createFocusable0(focusHandler):
JS("""
var div = $doc['createElement']('div');
div['tabIndex'] = 0;
var input = $doc['createElement']('input');
input['type'] = 'text';
input['style']['opacity'] = 0;
input['tabIndex'] = -1;
input['style']['zIndex'] = -1;
input['style']['width'] = '1px';
input['style']['height'] = '1px';
input['style']['overflow'] = 'hidden';
input['style']['position'] = 'absolute';
input['addEventListener']( 'focus', focusHandler, false);
div['appendChild'](input);
return div;
""")
def createFocusable():
return createFocusable0(ensureFocusHandler())
|
// ... existing code ...
def createFocusable():
return createFocusable0(ensureFocusHandler())
// ... rest of the code ...
|
acaacbea4fbfdcc0f1f0c5e0aa9a837dee439d08
|
saau/sections/image_provider.py
|
saau/sections/image_provider.py
|
import json
import inspect
from os.path import join, exists
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir, services):
self.data_dir = data_dir
self.services = services
def has_required_data(self):
raise not_implemented()
def obtain_data(self):
raise not_implemented()
def data_dir_exists(self, name):
return exists(self.data_dir_join(name))
def data_dir_join(self, name):
return join(self.data_dir, name)
def save_json(self, name, data):
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name):
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self):
raise not_implemented()
|
import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
self.services = services
def has_required_data(self) -> bool:
raise not_implemented()
def obtain_data(self) -> bool:
raise not_implemented()
def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self) -> str:
raise not_implemented()
|
Add types to ImageProvider and RequiresData
|
Add types to ImageProvider and RequiresData
|
Python
|
mit
|
Mause/statistical_atlas_of_au
|
+ import inspect
import json
- import inspect
- from os.path import join, exists
+ from os.path import exists, join
+ from pathlib import Path
+ from typing import Any, Union
+
+ from ..services import Services
+
+ PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
-
- def __init__(self, data_dir, services):
+ def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
self.services = services
- def has_required_data(self):
+ def has_required_data(self) -> bool:
raise not_implemented()
- def obtain_data(self):
+ def obtain_data(self) -> bool:
raise not_implemented()
- def data_dir_exists(self, name):
+ def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
- def data_dir_join(self, name):
+ def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
- def save_json(self, name, data):
+ def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
- def load_json(self, name):
+ def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
-
- def build_image(self):
+ def build_image(self) -> str:
raise not_implemented()
|
Add types to ImageProvider and RequiresData
|
## Code Before:
import json
import inspect
from os.path import join, exists
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir, services):
self.data_dir = data_dir
self.services = services
def has_required_data(self):
raise not_implemented()
def obtain_data(self):
raise not_implemented()
def data_dir_exists(self, name):
return exists(self.data_dir_join(name))
def data_dir_join(self, name):
return join(self.data_dir, name)
def save_json(self, name, data):
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name):
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self):
raise not_implemented()
## Instruction:
Add types to ImageProvider and RequiresData
## Code After:
import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
def not_implemented():
frame_info = inspect.currentframe().f_back
msg = ''
if 'self' in frame_info.f_locals:
self = frame_info.f_locals['self']
try:
msg += self.__name__ + '#' # for static/class methods
except AttributeError:
msg += self.__class__.__name__ + '.'
msg += frame_info.f_code.co_name + '()'
return NotImplementedError(msg)
class RequiresData:
def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
self.services = services
def has_required_data(self) -> bool:
raise not_implemented()
def obtain_data(self) -> bool:
raise not_implemented()
def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
json.dump(data, fh, indent=4)
return True
def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
return json.load(fh)
class ImageProvider(RequiresData):
def build_image(self) -> str:
raise not_implemented()
|
# ... existing code ...
import inspect
import json
from os.path import exists, join
from pathlib import Path
from typing import Any, Union
from ..services import Services
PathOrStr = Union[str,Path]
# ... modified code ...
class RequiresData:
def __init__(self, data_dir: Path, services: Services) -> None:
self.data_dir = data_dir
...
def has_required_data(self) -> bool:
raise not_implemented()
...
def obtain_data(self) -> bool:
raise not_implemented()
...
def data_dir_exists(self, name: PathOrStr) -> bool:
return exists(self.data_dir_join(name))
...
def data_dir_join(self, name: PathOrStr) -> str:
return join(self.data_dir, name)
...
def save_json(self, name: PathOrStr, data: Any) -> bool:
with open(self.data_dir_join(name), 'w') as fh:
...
def load_json(self, name: PathOrStr) -> Any:
with open(self.data_dir_join(name)) as fh:
...
class ImageProvider(RequiresData):
def build_image(self) -> str:
raise not_implemented()
# ... rest of the code ...
|
9edaa9a843ab4e93deaf1e3b1c09d26e5eadf62d
|
tests/test_acceptance.py
|
tests/test_acceptance.py
|
import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
|
import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
|
Add acceptance tests for results pages (2012 and 2013)
|
Add acceptance tests for results pages (2012 and 2013)
|
Python
|
unlicense
|
nott/next.filmfest.by,nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by,kinaklub/next.filmfest.by,kinaklub/next.filmfest.by,nott/next.filmfest.by
|
import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
+
+ @pytest.mark.django_db
+ def test_resultspage_2012(client):
+ response = client.get('/results2012/')
+ assert response.status_code == 200
+ assert '2012: how it was' in response.content
+ assert 'Agricola de Cologne' in response.content
+
+
+ @pytest.mark.django_db
+ def test_resultspage_2013(client):
+ response = client.get('/results2013/')
+ assert response.status_code == 200
+ assert '2013: good memories' in response.content
+ assert 'Volha Dashuk' in response.content
+
|
Add acceptance tests for results pages (2012 and 2013)
|
## Code Before:
import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
## Instruction:
Add acceptance tests for results pages (2012 and 2013)
## Code After:
import pytest
@pytest.mark.django_db
def test_homepage(client):
response = client.get('/')
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
|
...
assert response.status_code == 200
@pytest.mark.django_db
def test_resultspage_2012(client):
response = client.get('/results2012/')
assert response.status_code == 200
assert '2012: how it was' in response.content
assert 'Agricola de Cologne' in response.content
@pytest.mark.django_db
def test_resultspage_2013(client):
response = client.get('/results2013/')
assert response.status_code == 200
assert '2013: good memories' in response.content
assert 'Volha Dashuk' in response.content
...
|
a0ce4d366681f2f62f232f4f952ac18df07667d4
|
ideascube/conf/idb_fra_cultura.py
|
ideascube/conf/idb_fra_cultura.py
|
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
'id': 'software',
},
{
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
|
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
|
Remove "software" card from Cultura conf
|
Remove "software" card from Cultura conf
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
- 'id': 'software',
- },
- {
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
|
Remove "software" card from Cultura conf
|
## Code Before:
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
'id': 'software',
},
{
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
## Instruction:
Remove "software" card from Cultura conf
## Code After:
"""Ideaxbox Cultura, France"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_NAME = u"Cultura"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
(_('Personal informations'), ['serial', 'short_name', 'full_name', 'latin_name', 'birth_year', 'gender']), # noqa
)
HOME_CARDS = HOME_CARDS + [
{
'id': 'cpassorcier',
},
{
'id': 'wikisource',
},
{
'id': 'ted',
},
{
'id': 'ubuntudoc',
},
]
|
# ... existing code ...
{
'id': 'ted',
# ... rest of the code ...
|
e4fde66624f74c4b0bbfae7c7c11a50884a0a73c
|
pyfr/readers/base.py
|
pyfr/readers/base.py
|
from abc import ABCMeta, abstractmethod
import uuid
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
|
from abc import ABCMeta, abstractmethod
import uuid
import numpy as np
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = np.array(str(uuid.uuid4()), dtype='S')
return mesh
|
Fix the HDF5 type of mesh_uuid for imported meshes.
|
Fix the HDF5 type of mesh_uuid for imported meshes.
|
Python
|
bsd-3-clause
|
BrianVermeire/PyFR,Aerojspark/PyFR
|
from abc import ABCMeta, abstractmethod
import uuid
+
+ import numpy as np
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
- mesh['mesh_uuid'] = str(uuid.uuid4())
+ mesh['mesh_uuid'] = np.array(str(uuid.uuid4()), dtype='S')
return mesh
|
Fix the HDF5 type of mesh_uuid for imported meshes.
|
## Code Before:
from abc import ABCMeta, abstractmethod
import uuid
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = str(uuid.uuid4())
return mesh
## Instruction:
Fix the HDF5 type of mesh_uuid for imported meshes.
## Code After:
from abc import ABCMeta, abstractmethod
import uuid
import numpy as np
class BaseReader(object, metaclass=ABCMeta):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def _to_raw_pyfrm(self):
pass
def to_pyfrm(self):
mesh = self._to_raw_pyfrm()
# Add metadata
mesh['mesh_uuid'] = np.array(str(uuid.uuid4()), dtype='S')
return mesh
|
// ... existing code ...
import uuid
import numpy as np
// ... modified code ...
# Add metadata
mesh['mesh_uuid'] = np.array(str(uuid.uuid4()), dtype='S')
// ... rest of the code ...
|
0d572d60522ae0e80105330981a66bc541434b99
|
rip/filter_operators.py
|
rip/filter_operators.py
|
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val]
|
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
IN = 'in'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val]
|
Support __in as operator for backwards comp
|
Support __in as operator for backwards comp
|
Python
|
mit
|
Aplopio/rip,Aplopio/django_rip
|
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
+ IN = 'in'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
- ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1}
+ ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val]
|
Support __in as operator for backwards comp
|
## Code Before:
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val]
## Instruction:
Support __in as operator for backwards comp
## Code After:
EQUALS = 'equals'
GT = 'gt'
LT = 'lt'
IN = 'in'
OPERATOR_SEPARATOR = '__'
REVERSE_ORDER = '-'
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
def split_to_field_and_filter_type(filter_name):
filter_split = filter_name.split(OPERATOR_SEPARATOR)
filter_type = filter_split[-1] if len(filter_split) > 0 else None
if filter_type in ALL_OPERATORS:
return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type
else:
return filter_name, None
def split_to_field_and_order_type(field_name_with_operator):
if field_name_with_operator.startswith(REVERSE_ORDER):
return field_name_with_operator[1:], REVERSE_ORDER
else:
return field_name_with_operator, None
def transform_to_list(val):
if isinstance(val, (list, tuple)):
return val
else:
return [val]
|
# ... existing code ...
LT = 'lt'
IN = 'in'
# ... modified code ...
ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}
# ... rest of the code ...
|
db8dea37028432c89e098728970fbaa265e49359
|
bookmarks/core/models.py
|
bookmarks/core/models.py
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from taggit.managers import TaggableManager
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField()
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from taggit.managers import TaggableManager
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
|
Increase max length of url field.
|
Increase max length of url field.
|
Python
|
mit
|
tom-henderson/bookmarks,tom-henderson/bookmarks,tom-henderson/bookmarks
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from taggit.managers import TaggableManager
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
- url = models.URLField()
+ url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
|
Increase max length of url field.
|
## Code Before:
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from taggit.managers import TaggableManager
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField()
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
## Instruction:
Increase max length of url field.
## Code After:
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from taggit.managers import TaggableManager
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
|
...
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
...
|
c460fd7d257b25723fc19557ad4404519904e0a9
|
simplecoin/tests/__init__.py
|
simplecoin/tests/__init__.py
|
import simplecoin
import unittest
import datetime
import simplecoin.models as m
from decimal import Decimal
from simplecoin import db
class UnitTest(unittest.TestCase):
""" Represents a set of tests that only need the database iniailized, but
no fixture data """
def setUp(self, **kwargs):
extra = dict()
extra.update(kwargs)
app = simplecoin.create_app('webserver', configs=['test.toml'], **extra)
with app.app_context():
self.db = simplecoin.db
self.setup_db()
self.app = app
self._ctx = self.app.test_request_context()
self._ctx.push()
self.client = self.app.test_client()
def tearDown(self):
# dump the test elasticsearch index
db.session.remove()
db.drop_all()
def setup_db(self):
self.db.drop_all()
self.db.create_all()
db.session.commit()
def make_block(self, **kwargs):
vals = dict(currency="LTC",
height=1,
found_at=datetime.datetime.utcnow(),
time_started=datetime.datetime.utcnow(),
difficulty=12,
merged=False,
algo="scrypt",
total_value=Decimal("50"))
vals.update(kwargs)
blk = m.Block(**vals)
db.session.add(blk)
return blk
class RedisUnitTest(UnitTest):
def setUp(self):
UnitTest.setUp(self)
self.app.redis.flushdb()
|
import simplecoin
import unittest
import datetime
import random
import simplecoin.models as m
from decimal import Decimal
from simplecoin import db
class UnitTest(unittest.TestCase):
""" Represents a set of tests that only need the database iniailized, but
no fixture data """
def setUp(self, **kwargs):
# Set the random seed to a fixed number, causing all use of random
# to actually repeat exactly the same every time
random.seed(0)
extra = dict()
extra.update(kwargs)
app = simplecoin.create_app('webserver', configs=['test.toml'], **extra)
with app.app_context():
self.db = simplecoin.db
self.setup_db()
self.app = app
self._ctx = self.app.test_request_context()
self._ctx.push()
self.client = self.app.test_client()
def tearDown(self):
# dump the test elasticsearch index
db.session.remove()
db.drop_all()
def setup_db(self):
self.db.drop_all()
self.db.create_all()
db.session.commit()
def make_block(self, **kwargs):
vals = dict(currency="LTC",
height=1,
found_at=datetime.datetime.utcnow(),
time_started=datetime.datetime.utcnow(),
difficulty=12,
merged=False,
algo="scrypt",
total_value=Decimal("50"))
vals.update(kwargs)
blk = m.Block(**vals)
db.session.add(blk)
return blk
class RedisUnitTest(UnitTest):
def setUp(self):
UnitTest.setUp(self)
self.app.redis.flushdb()
|
Fix tests to allow use of random, but not change each time
|
Fix tests to allow use of random, but not change each time
|
Python
|
mit
|
nickgzzjr/simplecoin_multi,nickgzzjr/simplecoin_multi,nickgzzjr/simplecoin_multi,nickgzzjr/simplecoin_multi
|
import simplecoin
import unittest
import datetime
+ import random
import simplecoin.models as m
from decimal import Decimal
from simplecoin import db
class UnitTest(unittest.TestCase):
""" Represents a set of tests that only need the database iniailized, but
no fixture data """
def setUp(self, **kwargs):
+ # Set the random seed to a fixed number, causing all use of random
+ # to actually repeat exactly the same every time
+ random.seed(0)
extra = dict()
extra.update(kwargs)
app = simplecoin.create_app('webserver', configs=['test.toml'], **extra)
with app.app_context():
self.db = simplecoin.db
self.setup_db()
self.app = app
self._ctx = self.app.test_request_context()
self._ctx.push()
self.client = self.app.test_client()
def tearDown(self):
# dump the test elasticsearch index
db.session.remove()
db.drop_all()
def setup_db(self):
self.db.drop_all()
self.db.create_all()
db.session.commit()
def make_block(self, **kwargs):
vals = dict(currency="LTC",
height=1,
found_at=datetime.datetime.utcnow(),
time_started=datetime.datetime.utcnow(),
difficulty=12,
merged=False,
algo="scrypt",
total_value=Decimal("50"))
vals.update(kwargs)
blk = m.Block(**vals)
db.session.add(blk)
return blk
class RedisUnitTest(UnitTest):
def setUp(self):
UnitTest.setUp(self)
self.app.redis.flushdb()
|
Fix tests to allow use of random, but not change each time
|
## Code Before:
import simplecoin
import unittest
import datetime
import simplecoin.models as m
from decimal import Decimal
from simplecoin import db
class UnitTest(unittest.TestCase):
""" Represents a set of tests that only need the database iniailized, but
no fixture data """
def setUp(self, **kwargs):
extra = dict()
extra.update(kwargs)
app = simplecoin.create_app('webserver', configs=['test.toml'], **extra)
with app.app_context():
self.db = simplecoin.db
self.setup_db()
self.app = app
self._ctx = self.app.test_request_context()
self._ctx.push()
self.client = self.app.test_client()
def tearDown(self):
# dump the test elasticsearch index
db.session.remove()
db.drop_all()
def setup_db(self):
self.db.drop_all()
self.db.create_all()
db.session.commit()
def make_block(self, **kwargs):
vals = dict(currency="LTC",
height=1,
found_at=datetime.datetime.utcnow(),
time_started=datetime.datetime.utcnow(),
difficulty=12,
merged=False,
algo="scrypt",
total_value=Decimal("50"))
vals.update(kwargs)
blk = m.Block(**vals)
db.session.add(blk)
return blk
class RedisUnitTest(UnitTest):
def setUp(self):
UnitTest.setUp(self)
self.app.redis.flushdb()
## Instruction:
Fix tests to allow use of random, but not change each time
## Code After:
import simplecoin
import unittest
import datetime
import random
import simplecoin.models as m
from decimal import Decimal
from simplecoin import db
class UnitTest(unittest.TestCase):
""" Represents a set of tests that only need the database iniailized, but
no fixture data """
def setUp(self, **kwargs):
# Set the random seed to a fixed number, causing all use of random
# to actually repeat exactly the same every time
random.seed(0)
extra = dict()
extra.update(kwargs)
app = simplecoin.create_app('webserver', configs=['test.toml'], **extra)
with app.app_context():
self.db = simplecoin.db
self.setup_db()
self.app = app
self._ctx = self.app.test_request_context()
self._ctx.push()
self.client = self.app.test_client()
def tearDown(self):
# dump the test elasticsearch index
db.session.remove()
db.drop_all()
def setup_db(self):
self.db.drop_all()
self.db.create_all()
db.session.commit()
def make_block(self, **kwargs):
vals = dict(currency="LTC",
height=1,
found_at=datetime.datetime.utcnow(),
time_started=datetime.datetime.utcnow(),
difficulty=12,
merged=False,
algo="scrypt",
total_value=Decimal("50"))
vals.update(kwargs)
blk = m.Block(**vals)
db.session.add(blk)
return blk
class RedisUnitTest(UnitTest):
def setUp(self):
UnitTest.setUp(self)
self.app.redis.flushdb()
|
// ... existing code ...
import datetime
import random
// ... modified code ...
def setUp(self, **kwargs):
# Set the random seed to a fixed number, causing all use of random
# to actually repeat exactly the same every time
random.seed(0)
extra = dict()
// ... rest of the code ...
|
56bd6c6a0363323cc1f4b3fbbcd460ba446b0c6d
|
cubes/stores.py
|
cubes/stores.py
|
from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
"""Gets a new instance of a model provider with name `name`."""
ns = get_namespace("stores")
if not ns:
ns = initialize_namespace("stores", root_class=Store,
suffix="_store")
try:
factory = ns[name]
except KeyError:
raise CubesError("Unable to find store '%s'" % name)
return factory(**options)
def create_browser(type_, cube, store, locale, **options):
"""Creates a new browser."""
ns = get_namespace("browsers")
if not ns:
ns = initialize_namespace("browsers", root_class=AggregationBrowser,
suffix="_browser")
try:
factory = ns[type_]
except KeyError:
raise CubesError("Unable to find browser of type '%s'" % type_)
return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
"""Abstract class to find other stores through the class hierarchy."""
pass
|
from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
"""Gets a new instance of a model provider with name `name`."""
ns = get_namespace("stores")
if not ns:
ns = initialize_namespace("stores", root_class=Store,
suffix="_store")
try:
factory = ns[name]
except KeyError:
raise ConfigurationError("Unknown store '%s'" % name)
return factory(**options)
def create_browser(type_, cube, store, locale, **options):
"""Creates a new browser."""
ns = get_namespace("browsers")
if not ns:
ns = initialize_namespace("browsers", root_class=AggregationBrowser,
suffix="_browser")
try:
factory = ns[type_]
except KeyError:
raise ConfigurationError("Unable to find browser of type '%s'" % type_)
return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
"""Abstract class to find other stores through the class hierarchy."""
pass
|
Raise ConfigurationError error that causes server to fail and dump whole stacktrace
|
Raise ConfigurationError error that causes server to fail and dump whole stacktrace
|
Python
|
mit
|
noyeitan/cubes,ubreddy/cubes,she11c0de/cubes,zejn/cubes,zejn/cubes,pombredanne/cubes,she11c0de/cubes,pombredanne/cubes,ubreddy/cubes,jell0720/cubes,cesarmarinhorj/cubes,noyeitan/cubes,ubreddy/cubes,cesarmarinhorj/cubes,cesarmarinhorj/cubes,zejn/cubes,jell0720/cubes,noyeitan/cubes,pombredanne/cubes,jell0720/cubes,she11c0de/cubes
|
from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
"""Gets a new instance of a model provider with name `name`."""
ns = get_namespace("stores")
if not ns:
ns = initialize_namespace("stores", root_class=Store,
suffix="_store")
try:
factory = ns[name]
except KeyError:
- raise CubesError("Unable to find store '%s'" % name)
+ raise ConfigurationError("Unknown store '%s'" % name)
return factory(**options)
def create_browser(type_, cube, store, locale, **options):
"""Creates a new browser."""
ns = get_namespace("browsers")
if not ns:
ns = initialize_namespace("browsers", root_class=AggregationBrowser,
suffix="_browser")
try:
factory = ns[type_]
except KeyError:
- raise CubesError("Unable to find browser of type '%s'" % type_)
+ raise ConfigurationError("Unable to find browser of type '%s'" % type_)
return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
"""Abstract class to find other stores through the class hierarchy."""
pass
|
Raise ConfigurationError error that causes server to fail and dump whole stacktrace
|
## Code Before:
from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
"""Gets a new instance of a model provider with name `name`."""
ns = get_namespace("stores")
if not ns:
ns = initialize_namespace("stores", root_class=Store,
suffix="_store")
try:
factory = ns[name]
except KeyError:
raise CubesError("Unable to find store '%s'" % name)
return factory(**options)
def create_browser(type_, cube, store, locale, **options):
"""Creates a new browser."""
ns = get_namespace("browsers")
if not ns:
ns = initialize_namespace("browsers", root_class=AggregationBrowser,
suffix="_browser")
try:
factory = ns[type_]
except KeyError:
raise CubesError("Unable to find browser of type '%s'" % type_)
return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
"""Abstract class to find other stores through the class hierarchy."""
pass
## Instruction:
Raise ConfigurationError error that causes server to fail and dump whole stacktrace
## Code After:
from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
"""Gets a new instance of a model provider with name `name`."""
ns = get_namespace("stores")
if not ns:
ns = initialize_namespace("stores", root_class=Store,
suffix="_store")
try:
factory = ns[name]
except KeyError:
raise ConfigurationError("Unknown store '%s'" % name)
return factory(**options)
def create_browser(type_, cube, store, locale, **options):
"""Creates a new browser."""
ns = get_namespace("browsers")
if not ns:
ns = initialize_namespace("browsers", root_class=AggregationBrowser,
suffix="_browser")
try:
factory = ns[type_]
except KeyError:
raise ConfigurationError("Unable to find browser of type '%s'" % type_)
return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
"""Abstract class to find other stores through the class hierarchy."""
pass
|
# ... existing code ...
except KeyError:
raise ConfigurationError("Unknown store '%s'" % name)
# ... modified code ...
except KeyError:
raise ConfigurationError("Unable to find browser of type '%s'" % type_)
# ... rest of the code ...
|
7f0ab829f677a5d91d5b24dc6181a2519e25a934
|
notes/managers.py
|
notes/managers.py
|
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
|
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
|
Support hiding note and notebook template notes
|
Support hiding note and notebook template notes
|
Python
|
agpl-3.0
|
syskill/snowy,NoUsername/PrivateNotesExperimental,GNOME/snowy,jaredjennings/snowy,jaredjennings/snowy,sandyarmstrong/snowy,jaredjennings/snowy,NoUsername/PrivateNotesExperimental,widox/snowy,syskill/snowy,leonhandreke/snowy,leonhandreke/snowy,jaredjennings/snowy,widox/snowy,GNOME/snowy,sandyarmstrong/snowy
|
from django.db import models
class NoteManager(models.Manager):
- def user_viewable(self, request_user, author):
+ def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
- notes = notes.filter(permissions=1)
+ notes = notes.filter(permissions=1)
+ if not templates:
+ notes = notes.exclude(tags__name="system:template")
return notes
|
Support hiding note and notebook template notes
|
## Code Before:
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
## Instruction:
Support hiding note and notebook template notes
## Code After:
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
|
...
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
...
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
...
|
57f5d8c1f02aa5d3a1be5fdc7302a15d416071dc
|
utils/decorators.py
|
utils/decorators.py
|
from functools import wraps
def bot_only(func):
@wraps(func)
def inner(self, *args, **kwargs):
if not self.user.bot:
return
return func(self, *args, **kwargs)
return inner
|
from functools import wraps
def bot_only(coro):
@wraps(coro)
async def inner(self, *args, **kwargs):
if not self.user.bot:
return
return await coro(self, *args, **kwargs)
return inner
|
Make bot_only async, because duh
|
Make bot_only async, because duh
|
Python
|
mit
|
BeatButton/beattie-bot,BeatButton/beattie
|
from functools import wraps
- def bot_only(func):
+ def bot_only(coro):
- @wraps(func)
+ @wraps(coro)
- def inner(self, *args, **kwargs):
+ async def inner(self, *args, **kwargs):
if not self.user.bot:
return
- return func(self, *args, **kwargs)
+ return await coro(self, *args, **kwargs)
return inner
|
Make bot_only async, because duh
|
## Code Before:
from functools import wraps
def bot_only(func):
@wraps(func)
def inner(self, *args, **kwargs):
if not self.user.bot:
return
return func(self, *args, **kwargs)
return inner
## Instruction:
Make bot_only async, because duh
## Code After:
from functools import wraps
def bot_only(coro):
@wraps(coro)
async def inner(self, *args, **kwargs):
if not self.user.bot:
return
return await coro(self, *args, **kwargs)
return inner
|
# ... existing code ...
def bot_only(coro):
@wraps(coro)
async def inner(self, *args, **kwargs):
if not self.user.bot:
# ... modified code ...
return
return await coro(self, *args, **kwargs)
return inner
# ... rest of the code ...
|
2fa0c333cb92557b5ba39e91db41327ae381b6a7
|
Tools/px4params/xmlout.py
|
Tools/px4params/xmlout.py
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
Add version number to parameter meta data
|
Add version number to parameter meta data
|
Python
|
mit
|
darknight-007/Firmware,Aerotenna/Firmware,mcgill-robotics/Firmware,PX4/Firmware,PX4/Firmware,acfloria/Firmware,acfloria/Firmware,mcgill-robotics/Firmware,mcgill-robotics/Firmware,jlecoeur/Firmware,dagar/Firmware,PX4/Firmware,acfloria/Firmware,mje-nz/PX4-Firmware,dagar/Firmware,darknight-007/Firmware,PX4/Firmware,darknight-007/Firmware,dagar/Firmware,Aerotenna/Firmware,mcgill-robotics/Firmware,mje-nz/PX4-Firmware,krbeverx/Firmware,krbeverx/Firmware,PX4/Firmware,mcgill-robotics/Firmware,Aerotenna/Firmware,acfloria/Firmware,acfloria/Firmware,jlecoeur/Firmware,dagar/Firmware,krbeverx/Firmware,mje-nz/PX4-Firmware,Aerotenna/Firmware,Aerotenna/Firmware,darknight-007/Firmware,dagar/Firmware,dagar/Firmware,jlecoeur/Firmware,jlecoeur/Firmware,jlecoeur/Firmware,mje-nz/PX4-Firmware,Aerotenna/Firmware,acfloria/Firmware,Aerotenna/Firmware,acfloria/Firmware,PX4/Firmware,dagar/Firmware,jlecoeur/Firmware,krbeverx/Firmware,jlecoeur/Firmware,darknight-007/Firmware,mje-nz/PX4-Firmware,krbeverx/Firmware,mcgill-robotics/Firmware,mje-nz/PX4-Firmware,mje-nz/PX4-Firmware,krbeverx/Firmware,mcgill-robotics/Firmware,krbeverx/Firmware,PX4/Firmware,jlecoeur/Firmware
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
+ xml_version = xml_document.createElement("version")
+ xml_parameters.appendChild(xml_version)
+ xml_version_value = xml_document.createTextNode("1")
+ xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
Add version number to parameter meta data
|
## Code Before:
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
## Instruction:
Add version number to parameter meta data
## Code After:
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
...
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
...
|
7579cc3058ad172cb058fbefd43f756a2316e256
|
examples/modelzoo/download_model.py
|
examples/modelzoo/download_model.py
|
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
Fix argparse of caffe model download script
|
Fix argparse of caffe model download script
|
Python
|
mit
|
bayerj/chainer,kashif/chainer,AlpacaDB/chainer,AlpacaDB/chainer,kiyukuta/chainer,okuta/chainer,umitanuki/chainer,tkerola/chainer,aonotas/chainer,cupy/cupy,ktnyt/chainer,kikusu/chainer,tscohen/chainer,wkentaro/chainer,okuta/chainer,1986ks/chainer,Kaisuke5/chainer,muupan/chainer,kikusu/chainer,okuta/chainer,ktnyt/chainer,chainer/chainer,keisuke-umezawa/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,wkentaro/chainer,minhpqn/chainer,hvy/chainer,cupy/cupy,chainer/chainer,wavelets/chainer,woodshop/chainer,niboshi/chainer,t-abe/chainer,ikasumi/chainer,kuwa32/chainer,ysekky/chainer,ytoyama/yans_chainer_hackathon,ktnyt/chainer,sinhrks/chainer,jfsantos/chainer,cupy/cupy,elviswf/chainer,jnishi/chainer,niboshi/chainer,benob/chainer,tigerneil/chainer,sinhrks/chainer,jnishi/chainer,jnishi/chainer,yanweifu/chainer,niboshi/chainer,keisuke-umezawa/chainer,anaruse/chainer,ktnyt/chainer,chainer/chainer,muupan/chainer,woodshop/complex-chainer,masia02/chainer,wkentaro/chainer,hidenori-t/chainer,delta2323/chainer,tereka114/chainer,hvy/chainer,pfnet/chainer,wkentaro/chainer,jnishi/chainer,keisuke-umezawa/chainer,laysakura/chainer,hvy/chainer,t-abe/chainer,rezoo/chainer,okuta/chainer,cemoody/chainer,truongdq/chainer,keisuke-umezawa/chainer,sou81821/chainer,ronekko/chainer,cupy/cupy,benob/chainer,truongdq/chainer
|
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
- descriptor='Download a Caffe reference model')
+ description='Download a Caffe reference model')
- parser.add_argument('model_type',
+ parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
Fix argparse of caffe model download script
|
## Code Before:
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
## Instruction:
Fix argparse of caffe model download script
## Code After:
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
...
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
...
|
d5b326d8d368d2ac75c6e078572df8c28704c163
|
vcs/models.py
|
vcs/models.py
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
Use the app string version of foreign keying. It prevents a circular import.
|
Use the app string version of foreign keying. It prevents a circular import.
|
Python
|
bsd-3-clause
|
AeroNotix/django-timetracker,AeroNotix/django-timetracker,AeroNotix/django-timetracker
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
- from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
- Tbluser,
+ 'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
Use the app string version of foreign keying. It prevents a circular import.
|
## Code Before:
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
## Instruction:
Use the app string version of foreign keying. It prevents a circular import.
## Code After:
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
// ... existing code ...
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
// ... rest of the code ...
|
8d80401d19a5635053ceefcbb2bc4cfe8bb7a339
|
spoppy/config.py
|
spoppy/config.py
|
import getpass
import os
from appdirs import user_cache_dir
CONFIG_FILE_NAME = os.path.join(
user_cache_dir(appname='spoppy'), '.creds'
)
def get_config():
if os.path.exists(CONFIG_FILE_NAME):
with open(CONFIG_FILE_NAME, 'r') as f:
return [
line.strip() for line in f.readlines()
][:2]
return None, None
def set_config(username, password):
with open(CONFIG_FILE_NAME, 'w') as f:
f.write(username)
f.write('\n')
f.write(password)
def get_config_from_user():
username, password = (
input('Username: '),
getpass.getpass('Password: ')
)
set_config(username, password)
return username, password
def clear_config():
os.remove(CONFIG_FILE_NAME)
|
import getpass
import os
from appdirs import user_cache_dir
try:
# python2.7
input = raw_input
except NameError:
pass
CONFIG_FILE_NAME = os.path.join(
user_cache_dir(appname='spoppy'), '.creds'
)
def get_config():
if os.path.exists(CONFIG_FILE_NAME):
with open(CONFIG_FILE_NAME, 'r') as f:
return [
line.strip() for line in f.readlines()
][:2]
return None, None
def set_config(username, password):
with open(CONFIG_FILE_NAME, 'w') as f:
f.write(username)
f.write('\n')
f.write(password)
def get_config_from_user():
username, password = (
input('Username: '),
getpass.getpass('Password: ')
)
set_config(username, password)
return username, password
def clear_config():
os.remove(CONFIG_FILE_NAME)
|
Fix error with saving credentials in python 2.7
|
Fix error with saving credentials in python 2.7
This fixes #102
|
Python
|
mit
|
sindrig/spoppy,sindrig/spoppy
|
import getpass
import os
from appdirs import user_cache_dir
+
+ try:
+ # python2.7
+ input = raw_input
+ except NameError:
+ pass
CONFIG_FILE_NAME = os.path.join(
user_cache_dir(appname='spoppy'), '.creds'
)
def get_config():
if os.path.exists(CONFIG_FILE_NAME):
with open(CONFIG_FILE_NAME, 'r') as f:
return [
line.strip() for line in f.readlines()
][:2]
return None, None
def set_config(username, password):
with open(CONFIG_FILE_NAME, 'w') as f:
f.write(username)
f.write('\n')
f.write(password)
def get_config_from_user():
username, password = (
input('Username: '),
getpass.getpass('Password: ')
)
set_config(username, password)
return username, password
def clear_config():
os.remove(CONFIG_FILE_NAME)
|
Fix error with saving credentials in python 2.7
|
## Code Before:
import getpass
import os
from appdirs import user_cache_dir
CONFIG_FILE_NAME = os.path.join(
user_cache_dir(appname='spoppy'), '.creds'
)
def get_config():
if os.path.exists(CONFIG_FILE_NAME):
with open(CONFIG_FILE_NAME, 'r') as f:
return [
line.strip() for line in f.readlines()
][:2]
return None, None
def set_config(username, password):
with open(CONFIG_FILE_NAME, 'w') as f:
f.write(username)
f.write('\n')
f.write(password)
def get_config_from_user():
username, password = (
input('Username: '),
getpass.getpass('Password: ')
)
set_config(username, password)
return username, password
def clear_config():
os.remove(CONFIG_FILE_NAME)
## Instruction:
Fix error with saving credentials in python 2.7
## Code After:
import getpass
import os
from appdirs import user_cache_dir
try:
# python2.7
input = raw_input
except NameError:
pass
CONFIG_FILE_NAME = os.path.join(
user_cache_dir(appname='spoppy'), '.creds'
)
def get_config():
if os.path.exists(CONFIG_FILE_NAME):
with open(CONFIG_FILE_NAME, 'r') as f:
return [
line.strip() for line in f.readlines()
][:2]
return None, None
def set_config(username, password):
with open(CONFIG_FILE_NAME, 'w') as f:
f.write(username)
f.write('\n')
f.write(password)
def get_config_from_user():
username, password = (
input('Username: '),
getpass.getpass('Password: ')
)
set_config(username, password)
return username, password
def clear_config():
os.remove(CONFIG_FILE_NAME)
|
// ... existing code ...
from appdirs import user_cache_dir
try:
# python2.7
input = raw_input
except NameError:
pass
// ... rest of the code ...
|
642908032012baf200ab227803982730c6d4b083
|
stdnum/ca/__init__.py
|
stdnum/ca/__init__.py
|
"""Collection of Canadian numbers."""
|
"""Collection of Canadian numbers."""
from stdnum.ca import bn as vat # noqa: F401
|
Add missing vat alias for Canada
|
Add missing vat alias for Canada
|
Python
|
lgpl-2.1
|
arthurdejong/python-stdnum,arthurdejong/python-stdnum,arthurdejong/python-stdnum
|
"""Collection of Canadian numbers."""
+ from stdnum.ca import bn as vat # noqa: F401
|
Add missing vat alias for Canada
|
## Code Before:
"""Collection of Canadian numbers."""
## Instruction:
Add missing vat alias for Canada
## Code After:
"""Collection of Canadian numbers."""
from stdnum.ca import bn as vat # noqa: F401
|
// ... existing code ...
"""Collection of Canadian numbers."""
from stdnum.ca import bn as vat # noqa: F401
// ... rest of the code ...
|
82f2fb3c3956e4ad4c65b03b3918ea409593d4ef
|
gcloud/__init__.py
|
gcloud/__init__.py
|
"""GCloud API access in idiomatic Python."""
__version__ = '0.02.2'
|
"""GCloud API access in idiomatic Python."""
from pkg_resources import get_distribution
__version__ = get_distribution('gcloud').version
|
Read module version from setup.py
|
Read module version from setup.py
|
Python
|
apache-2.0
|
googleapis/google-cloud-python,blowmage/gcloud-python,thesandlord/gcloud-python,calpeyser/google-cloud-python,CyrusBiotechnology/gcloud-python,waprin/gcloud-python,VitalLabs/gcloud-python,waprin/google-cloud-python,jonparrott/google-cloud-python,Fkawala/gcloud-python,tswast/google-cloud-python,waprin/gcloud-python,dhermes/gcloud-python,quom/google-cloud-python,tseaver/google-cloud-python,tartavull/google-cloud-python,optimizely/gcloud-python,lucemia/gcloud-python,CyrusBiotechnology/gcloud-python,tswast/google-cloud-python,VitalLabs/gcloud-python,GrimDerp/gcloud-python,tseaver/gcloud-python,blowmage/gcloud-python,vj-ug/gcloud-python,jbuberel/gcloud-python,tswast/google-cloud-python,jonparrott/google-cloud-python,tseaver/google-cloud-python,tartavull/google-cloud-python,dhermes/google-cloud-python,tseaver/google-cloud-python,dhermes/google-cloud-python,quom/google-cloud-python,tseaver/gcloud-python,optimizely/gcloud-python,jonparrott/gcloud-python,dhermes/gcloud-python,googleapis/google-cloud-python,EugenePig/gcloud-python,EugenePig/gcloud-python,GrimDerp/gcloud-python,lucemia/gcloud-python,daspecster/google-cloud-python,jonparrott/gcloud-python,daspecster/google-cloud-python,jgeewax/gcloud-python,GoogleCloudPlatform/gcloud-python,jbuberel/gcloud-python,waprin/google-cloud-python,elibixby/gcloud-python,thesandlord/gcloud-python,GoogleCloudPlatform/gcloud-python,calpeyser/google-cloud-python,Fkawala/gcloud-python,elibixby/gcloud-python,dhermes/google-cloud-python,optimizely/gcloud-python,vj-ug/gcloud-python,jgeewax/gcloud-python
|
"""GCloud API access in idiomatic Python."""
+ from pkg_resources import get_distribution
- __version__ = '0.02.2'
+ __version__ = get_distribution('gcloud').version
|
Read module version from setup.py
|
## Code Before:
"""GCloud API access in idiomatic Python."""
__version__ = '0.02.2'
## Instruction:
Read module version from setup.py
## Code After:
"""GCloud API access in idiomatic Python."""
from pkg_resources import get_distribution
__version__ = get_distribution('gcloud').version
|
# ... existing code ...
from pkg_resources import get_distribution
__version__ = get_distribution('gcloud').version
# ... rest of the code ...
|
e7bfa4bc9bc8c1caf7ef5f4618943543bed99f0a
|
linter.py
|
linter.py
|
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if platform.system() == 'Windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
Use the platform() in sublime.py rather than importing platform.
|
Use the platform() in sublime.py rather than importing platform.
|
Python
|
mit
|
jfcherng/SublimeLinter-contrib-iverilog,jfcherng/SublimeLinter-contrib-iverilog
|
+ import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
- import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
- if platform.system() == 'Windows':
+ if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
Use the platform() in sublime.py rather than importing platform.
|
## Code Before:
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if platform.system() == 'Windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
## Instruction:
Use the platform() in sublime.py rather than importing platform.
## Code After:
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
// ... existing code ...
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
// ... modified code ...
if sublime.platform() == 'windows':
regex = (
// ... rest of the code ...
|
edb10e7ae1f428dade04a9976c3b3f985065d458
|
settings/__init__.py
|
settings/__init__.py
|
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
from .prod import * # noqa
except ImportError:
pass
|
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
except ImportError:
pass
try:
from .prod import * # noqa
except ImportError:
pass
|
Make sure prod.py is read in settings
|
Make sure prod.py is read in settings
|
Python
|
mit
|
hTrap/junction,farhaanbukhsh/junction,ChillarAnand/junction,farhaanbukhsh/junction,akshayaurora/junction,NabeelValapra/junction,pythonindia/junction,shashisp/junction,hTrap/junction,ChillarAnand/junction,shashisp/junction,nava45/junction,NabeelValapra/junction,shashisp/junction,farhaanbukhsh/junction,akshayaurora/junction,shashisp/junction,nava45/junction,pythonindia/junction,Rahul91/junction,NabeelValapra/junction,hTrap/junction,Rahul91/junction,Rahul91/junction,praba230890/junction,Rahul91/junction,akshayaurora/junction,nava45/junction,nava45/junction,hTrap/junction,pythonindia/junction,ChillarAnand/junction,pythonindia/junction,akshayaurora/junction,praba230890/junction,NabeelValapra/junction,praba230890/junction,ChillarAnand/junction,farhaanbukhsh/junction,praba230890/junction
|
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
+ except ImportError:
+ pass
+
+ try:
from .prod import * # noqa
except ImportError:
pass
|
Make sure prod.py is read in settings
|
## Code Before:
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
from .prod import * # noqa
except ImportError:
pass
## Instruction:
Make sure prod.py is read in settings
## Code After:
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
except ImportError:
pass
try:
from .prod import * # noqa
except ImportError:
pass
|
...
from .dev import * # noqa
except ImportError:
pass
try:
from .prod import * # noqa
...
|
b1f173fdbfb60e26a3923c7b024bc3e65e5abf80
|
selvbetjening/scheckin/now/urls.py
|
selvbetjening/scheckin/now/urls.py
|
from django.conf.urls import *
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
)
|
from django.conf.urls import *
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
import selvbetjening.sadmin.base.sadmin
now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
now_page = RemoteSPage(_(u'Now Check-in'), now_url)
selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
Add links to easy check-in in sadmin
|
Add links to easy check-in in sadmin
|
Python
|
mit
|
animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening
|
from django.conf.urls import *
+ from django.conf import settings
+ from django.core.urlresolvers import reverse
+ from django.utils.translation import ugettext as _
+
+ from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
- url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
+ url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
+ if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
+ import selvbetjening.sadmin.base.sadmin
+
+ now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
+ now_page = RemoteSPage(_(u'Now Check-in'), now_url)
+
+ selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
Add links to easy check-in in sadmin
|
## Code Before:
from django.conf.urls import *
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin),
)
## Instruction:
Add links to easy check-in in sadmin
## Code After:
from django.conf.urls import *
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from selvbetjening.sadmin.base.nav import RemoteSPage
import views
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
import selvbetjening.sadmin.base.sadmin
now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
now_page = RemoteSPage(_(u'Now Check-in'), now_url)
selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
|
...
from django.conf.urls import *
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from selvbetjening.sadmin.base.nav import RemoteSPage
...
urlpatterns = patterns('',
url(r'^(?P<event_id>[0-9]+)/$', views.checkin, name='now_checkin'),
)
if 'selvbetjening.sadmin.events' in settings.INSTALLED_APPS:
import selvbetjening.sadmin.base.sadmin
now_url = lambda context, stack: reverse('now_checkin', kwargs={'event_id': stack[-1].pk})
now_page = RemoteSPage(_(u'Now Check-in'), now_url)
selvbetjening.sadmin.base.sadmin.site.get('events').attendee_admin.sadmin_action_menu.register(now_page)
...
|
766ea05836544b808cd2c346873d9e4f60c858a1
|
ping/tests/test_ping.py
|
ping/tests/test_ping.py
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
Update test to assert metric
|
Update test to assert metric
|
Python
|
bsd-3-clause
|
DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras
|
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
+ aggregator.assert_metric('network.ping.can_connect', value=1)
|
Update test to assert metric
|
## Code Before:
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
## Instruction:
Update test to assert metric
## Code After:
import pytest
import mock
from datadog_checks.checks import AgentCheck
from datadog_checks.ping import PingCheck
from datadog_checks.errors import CheckException
def mock_exec_ping():
return """FAKEPING 127.0.0.1 (127.0.0.1): 56 data bytes
64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.093 ms
--- 127.0.0.1 ping statistics ---
1 packets transmitted, 1 packets received, 0.0% packet loss
round-trip min/avg/max/stddev = 0.093/0.093/0.093/0.000 ms"""
def test_check(aggregator, instance):
c = PingCheck('ping', {}, {})
# empty instance
instance = {}
with pytest.raises(CheckException):
c.check(instance)
# only name
with pytest.raises(CheckException):
c.check({'name': 'Datadog'})
test_check
# good check
instance = {
'host': '127.0.0.1',
'name': "Localhost"
}
with mock.patch.object(c, "_exec_ping", return_value=mock_exec_ping()):
c.check(instance)
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
|
...
aggregator.assert_service_check('network.ping.can_connect', AgentCheck.OK)
aggregator.assert_metric('network.ping.can_connect', value=1)
...
|
7dc34b159f837d4fdc71666233f66d340cfd3419
|
src/info_retrieval/info_retrieval.py
|
src/info_retrieval/info_retrieval.py
|
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
Add debugging statement to retrieve_passages function
|
Add debugging statement to retrieve_passages function
|
Python
|
mit
|
amkahn/question-answering,amkahn/question-answering
|
from pymur import *
from general_classes import *
+ import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
+ sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
Add debugging statement to retrieve_passages function
|
## Code Before:
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
## Instruction:
Add debugging statement to retrieve_passages function
## Code After:
from pymur import *
from general_classes import *
import sys
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin:end], doc.score, doc_id)
passages.append(passage)
return passages
|
// ... existing code ...
from general_classes import *
import sys
// ... modified code ...
query = " ".join(query)
sys.stderr.write(query)
# second argument is the number of documents desired
// ... rest of the code ...
|
563316ca4df666ada6e2b0c6a224a159b06884d0
|
tests.py
|
tests.py
|
import datetime
import unittest
import mock
from nose.tools import assert_equal
from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
mock_datareader()
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
def test_get_required_tickers_parses_tickers_with_newline(self):
m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
textfile = None # only used to provide valid argument
with mock.patch('pandas_finance.open', m, create=True):
result = get_required_tickers(textfile)
assert_equal('TWTR,FB,AAPL,MSFT', result)
|
import datetime
import unittest
import mock
from nose.tools import assert_equal
from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
def test_get_required_tickers_parses_tickers_with_newline(self):
m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
textfile = None # only used to provide valid argument
with mock.patch('pandas_finance.open', m, create=True):
result = get_required_tickers(textfile)
assert_equal('TWTR,FB,AAPL,MSFT', result)
|
Remove unnecessary call to mock_datareader().
|
Remove unnecessary call to mock_datareader().
|
Python
|
agpl-3.0
|
scraperwiki/stock-tool,scraperwiki/stock-tool
|
import datetime
import unittest
import mock
from nose.tools import assert_equal
from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
- mock_datareader()
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
def test_get_required_tickers_parses_tickers_with_newline(self):
m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
textfile = None # only used to provide valid argument
with mock.patch('pandas_finance.open', m, create=True):
result = get_required_tickers(textfile)
assert_equal('TWTR,FB,AAPL,MSFT', result)
|
Remove unnecessary call to mock_datareader().
|
## Code Before:
import datetime
import unittest
import mock
from nose.tools import assert_equal
from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
mock_datareader()
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
def test_get_required_tickers_parses_tickers_with_newline(self):
m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
textfile = None # only used to provide valid argument
with mock.patch('pandas_finance.open', m, create=True):
result = get_required_tickers(textfile)
assert_equal('TWTR,FB,AAPL,MSFT', result)
## Instruction:
Remove unnecessary call to mock_datareader().
## Code After:
import datetime
import unittest
import mock
from nose.tools import assert_equal
from pandas_finance import get_stock, get_required_tickers
class PandasFinanceTestCase(unittest.TestCase):
@mock.patch('pandas_finance.web.DataReader')
def test_get_stock_called_correctly(self, mock_datareader):
start = datetime.datetime(1999, 4, 3, 0, 0)
end = datetime.datetime(2005, 2, 5, 0, 0)
get_stock('AAPL', start, end)
mock_datareader.assert_called_with('AAPL', 'yahoo', start, end)
def test_get_required_tickers_parses_tickers_with_newline(self):
m = mock.mock_open(read_data='TWTR,FB,AAPL,MSFT\n')
textfile = None # only used to provide valid argument
with mock.patch('pandas_finance.open', m, create=True):
result = get_required_tickers(textfile)
assert_equal('TWTR,FB,AAPL,MSFT', result)
|
...
def test_get_stock_called_correctly(self, mock_datareader):
start = datetime.datetime(1999, 4, 3, 0, 0)
...
|
b04e7afbd56518ba0e825d70b11a0c88e2d6e29d
|
astm/tests/utils.py
|
astm/tests/utils.py
|
class DummyMixIn(object):
_input_buffer = ''
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
class DummyMixIn(object):
_input_buffer = ''
addr = ('localhost', '15200')
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
Set dummy address info for tests.
|
Set dummy address info for tests.
|
Python
|
bsd-3-clause
|
asingla87/python-astm,andrexmd/python-astm,pombreda/python-astm,mhaulo/python-astm,MarcosHaenisch/python-astm,briankip/python-astm,kxepal/python-astm,123412345/python-astm,tinoshot/python-astm,eddiep1101/python-astm,LogicalKnight/python-astm,Iskander1b/python-astm,AlanZatarain/python-astm,kxepal/python-astm,tectronics/python-astm,Alwnikrotikz/python-astm
|
class DummyMixIn(object):
_input_buffer = ''
+ addr = ('localhost', '15200')
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
Set dummy address info for tests.
|
## Code Before:
class DummyMixIn(object):
_input_buffer = ''
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
## Instruction:
Set dummy address info for tests.
## Code After:
class DummyMixIn(object):
_input_buffer = ''
addr = ('localhost', '15200')
def flush(self):
pass
def close(self):
pass
class CallLogger(object):
def __init__(self, func):
self.func = func
self.was_called = False
def __call__(self, *args, **kwargs):
self.was_called = True
return self.func(*args, **kwargs)
def track_call(func):
return CallLogger(func)
|
// ... existing code ...
_input_buffer = ''
addr = ('localhost', '15200')
// ... rest of the code ...
|
af8e871eb2752f0fe75ccd7b2a12f81a5ef19d04
|
tests/test_np.py
|
tests/test_np.py
|
from parser_tool import parse, get_parser
def test_np():
grammar = get_parser("grammars/test_np.fcfg", trace=0)
f = open("grammars/nounphrase.sample")
for line in f:
# remove newline
actual_line = line[:-1]
trees = parse(grammar, actual_line)
assert len(trees) > 0, "Failed: %s" % actual_line
f.close()
|
from parser_tool import parse, get_parser
from utils import go_over_file
grammar = get_parser("grammars/test_np.fcfg", trace=0)
def test_np_positive():
def is_ok(sentence):
trees = parse(grammar, sentence)
assert len(trees) > 0, "Failed: %s" % sentence
go_over_file("grammars/nounphrase.sample", is_ok)
def test_np_negative():
""" tests to see if grammar refuses wrong samples """
def is_not_ok(sentence):
trees = parse(grammar, sentence)
assert len(trees) == 0, "Failed: %s" % sentence
go_over_file("grammars/nounphrase.sample.negative", is_not_ok)
|
Test both correct and wrong samples of noun phrase
|
Test both correct and wrong samples of noun phrase
Extended noun phrase test to check for the grammar refusing wrong
samples of noun phrases. Also added a utility module called
utils.py
|
Python
|
mit
|
caninemwenja/marker,kmwenja/marker
|
from parser_tool import parse, get_parser
+ from utils import go_over_file
- def test_np():
- grammar = get_parser("grammars/test_np.fcfg", trace=0)
+ grammar = get_parser("grammars/test_np.fcfg", trace=0)
+ def test_np_positive():
+ def is_ok(sentence):
+ trees = parse(grammar, sentence)
+ assert len(trees) > 0, "Failed: %s" % sentence
+
- f = open("grammars/nounphrase.sample")
+ go_over_file("grammars/nounphrase.sample", is_ok)
- for line in f:
- # remove newline
- actual_line = line[:-1]
+ def test_np_negative():
+ """ tests to see if grammar refuses wrong samples """
+
+ def is_not_ok(sentence):
- trees = parse(grammar, actual_line)
+ trees = parse(grammar, sentence)
- assert len(trees) > 0, "Failed: %s" % actual_line
+ assert len(trees) == 0, "Failed: %s" % sentence
- f.close()
+ go_over_file("grammars/nounphrase.sample.negative", is_not_ok)
+
+
|
Test both correct and wrong samples of noun phrase
|
## Code Before:
from parser_tool import parse, get_parser
def test_np():
grammar = get_parser("grammars/test_np.fcfg", trace=0)
f = open("grammars/nounphrase.sample")
for line in f:
# remove newline
actual_line = line[:-1]
trees = parse(grammar, actual_line)
assert len(trees) > 0, "Failed: %s" % actual_line
f.close()
## Instruction:
Test both correct and wrong samples of noun phrase
## Code After:
from parser_tool import parse, get_parser
from utils import go_over_file
grammar = get_parser("grammars/test_np.fcfg", trace=0)
def test_np_positive():
def is_ok(sentence):
trees = parse(grammar, sentence)
assert len(trees) > 0, "Failed: %s" % sentence
go_over_file("grammars/nounphrase.sample", is_ok)
def test_np_negative():
""" tests to see if grammar refuses wrong samples """
def is_not_ok(sentence):
trees = parse(grammar, sentence)
assert len(trees) == 0, "Failed: %s" % sentence
go_over_file("grammars/nounphrase.sample.negative", is_not_ok)
|
// ... existing code ...
from parser_tool import parse, get_parser
from utils import go_over_file
grammar = get_parser("grammars/test_np.fcfg", trace=0)
def test_np_positive():
def is_ok(sentence):
trees = parse(grammar, sentence)
assert len(trees) > 0, "Failed: %s" % sentence
go_over_file("grammars/nounphrase.sample", is_ok)
def test_np_negative():
""" tests to see if grammar refuses wrong samples """
def is_not_ok(sentence):
trees = parse(grammar, sentence)
assert len(trees) == 0, "Failed: %s" % sentence
go_over_file("grammars/nounphrase.sample.negative", is_not_ok)
// ... rest of the code ...
|
47f495e7e5b8fa06991e0c263bc9239818dd5b4f
|
airpy/list.py
|
airpy/list.py
|
import os
import airpy
def airlist():
installed_docs = os.listdir(airpy.data_directory)
for dir in installed_docs:
print(dir, end= ' ')
print(end = '\n')
|
from __future__ import print_function
import os
import airpy
def airlist():
installed_docs = os.listdir(airpy.data_directory)
for dir in installed_docs:
print(dir, end= ' ')
print(end = '\n')
|
Add a Backwards compatibility for python 2.7 by adding a __future__ import
|
Add a Backwards compatibility for python 2.7 by adding a __future__ import
|
Python
|
mit
|
kevinaloys/airpy
|
+ from __future__ import print_function
import os
import airpy
+
def airlist():
installed_docs = os.listdir(airpy.data_directory)
for dir in installed_docs:
print(dir, end= ' ')
print(end = '\n')
+
|
Add a Backwards compatibility for python 2.7 by adding a __future__ import
|
## Code Before:
import os
import airpy
def airlist():
installed_docs = os.listdir(airpy.data_directory)
for dir in installed_docs:
print(dir, end= ' ')
print(end = '\n')
## Instruction:
Add a Backwards compatibility for python 2.7 by adding a __future__ import
## Code After:
from __future__ import print_function
import os
import airpy
def airlist():
installed_docs = os.listdir(airpy.data_directory)
for dir in installed_docs:
print(dir, end= ' ')
print(end = '\n')
|
...
from __future__ import print_function
import os
...
import airpy
def airlist():
...
|
40fd8c680f335ebd1bc217f35a47f169c336530c
|
pyosf/tools.py
|
pyosf/tools.py
|
def find_by_key(in_list, key, val):
"""Returns the first item with key matching val
"""
return (item for item in in_list if item[key] == val).next()
def dict_from_list(in_list, key):
"""From a list of dicts creates a dict of dicts using a given key name
"""
d = {}
for entry in in_list:
d[entry[key]] = entry
return d
|
def find_by_key(in_list, key, val):
"""Returns the first item with key matching val
"""
return next(item for item in in_list if item[key] == val)
def dict_from_list(in_list, key):
"""From a list of dicts creates a dict of dicts using a given key name
"""
d = {}
for entry in in_list:
d[entry[key]] = entry
return d
|
Fix compatibility with Py3 (generators no longer have next())
|
Fix compatibility with Py3 (generators no longer have next())
But there is a next() function as a general built-in and works in 2.6 too
|
Python
|
mit
|
psychopy/pyosf
|
def find_by_key(in_list, key, val):
"""Returns the first item with key matching val
"""
- return (item for item in in_list if item[key] == val).next()
+ return next(item for item in in_list if item[key] == val)
def dict_from_list(in_list, key):
"""From a list of dicts creates a dict of dicts using a given key name
"""
d = {}
for entry in in_list:
d[entry[key]] = entry
return d
|
Fix compatibility with Py3 (generators no longer have next())
|
## Code Before:
def find_by_key(in_list, key, val):
"""Returns the first item with key matching val
"""
return (item for item in in_list if item[key] == val).next()
def dict_from_list(in_list, key):
"""From a list of dicts creates a dict of dicts using a given key name
"""
d = {}
for entry in in_list:
d[entry[key]] = entry
return d
## Instruction:
Fix compatibility with Py3 (generators no longer have next())
## Code After:
def find_by_key(in_list, key, val):
"""Returns the first item with key matching val
"""
return next(item for item in in_list if item[key] == val)
def dict_from_list(in_list, key):
"""From a list of dicts creates a dict of dicts using a given key name
"""
d = {}
for entry in in_list:
d[entry[key]] = entry
return d
|
# ... existing code ...
"""
return next(item for item in in_list if item[key] == val)
# ... rest of the code ...
|
ca31ecaf79e42cacc023277aa163af8887a360ad
|
mlog/log.py
|
mlog/log.py
|
import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
|
import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
|
Use with statement when writing to a file
|
Use with statement when writing to a file
|
Python
|
agpl-3.0
|
fajran/mlog
|
import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
- f = open(fname, 'a')
+ with open(fname, 'a') as f:
- f.write("=== %s ===\n" % datetime.now())
+ f.write("=== %s ===\n" % datetime.now())
- f.write("args: %s\n" % (" ".join(param),))
+ f.write("args: %s\n" % (" ".join(param),))
- f.write("-------------\n")
+ f.write("-------------\n")
- f.write(email)
+ f.write(email)
- f.write("\n\n")
+ f.write("\n\n")
- f.flush()
+ f.flush()
- f.close()
|
Use with statement when writing to a file
|
## Code Before:
import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
f = open(fname, 'a')
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
f.close()
## Instruction:
Use with statement when writing to a file
## Code After:
import gzip
import json
from datetime import datetime
def log_database(conn, param, email):
param = json.dumps(param)
email_gz = gzip.compress(email.encode('ascii'))
values = (param, email_gz)
c = conn.cursor()
c.execute('''
INSERT INTO email_log (`param`, `email_gz`)
VALUES (?, ?)
''', values)
def log_text(fname, param, email):
from datetime import datetime
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
|
...
with open(fname, 'a') as f:
f.write("=== %s ===\n" % datetime.now())
f.write("args: %s\n" % (" ".join(param),))
f.write("-------------\n")
f.write(email)
f.write("\n\n")
f.flush()
...
|
6448691ed77be2fd74761e056eeb5f16a881fd54
|
test_settings.py
|
test_settings.py
|
from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
|
from foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
Adjust test settings to be in line with jmbo-skeleton
|
Adjust test settings to be in line with jmbo-skeleton
|
Python
|
bsd-3-clause
|
praekelt/jmbo-competition,praekelt/jmbo-competition
|
from foundry.settings import *
- # We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
- # in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
- SOUTH_TESTS_MIGRATE = False
+ # Need this last line until django-setuptest is improved.
|
Adjust test settings to be in line with jmbo-skeleton
|
## Code Before:
from foundry.settings import *
# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
SOUTH_TESTS_MIGRATE = False
## Instruction:
Adjust test settings to be in line with jmbo-skeleton
## Code After:
from foundry.settings import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'competition',
'USER': 'test',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Need this last line until django-setuptest is improved.
|
# ... existing code ...
DATABASES = {
# ... modified code ...
# Need this last line until django-setuptest is improved.
# ... rest of the code ...
|
0c79d2fee14d5d2bff51ade9d643df22dde7f301
|
polyaxon/polyaxon/config_settings/scheduler/__init__.py
|
polyaxon/polyaxon/config_settings/scheduler/__init__.py
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
Add registry settings to scheduler
|
Add registry settings to scheduler
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
+ from polyaxon.config_settings.registry import *
from .apps import *
|
Add registry settings to scheduler
|
## Code Before:
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from .apps import *
## Instruction:
Add registry settings to scheduler
## Code After:
from polyaxon.config_settings.k8s import *
from polyaxon.config_settings.dirs import *
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
|
// ... existing code ...
from polyaxon.config_settings.spawner import *
from polyaxon.config_settings.registry import *
from .apps import *
// ... rest of the code ...
|
98acdc9262cfa8c5da092e0c3b1264afdcbde66a
|
locations/spiders/speedway.py
|
locations/spiders/speedway.py
|
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
name = "superamerica"
allowed_domains = ["superamerica.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
|
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
name = "speedway"
allowed_domains = ["www.speedway.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
|
Correct the name of the spider
|
Correct the name of the spider
|
Python
|
mit
|
iandees/all-the-places,iandees/all-the-places,iandees/all-the-places
|
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
- name = "superamerica"
+ name = "speedway"
- allowed_domains = ["superamerica.com"]
+ allowed_domains = ["www.speedway.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
|
Correct the name of the spider
|
## Code Before:
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
name = "superamerica"
allowed_domains = ["superamerica.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
## Instruction:
Correct the name of the spider
## Code After:
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
name = "speedway"
allowed_domains = ["www.speedway.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
|
...
class SuperAmericaSpider(scrapy.Spider):
name = "speedway"
allowed_domains = ["www.speedway.com"]
start_urls = (
...
|
230bb0a09146cd0b696b528b3ad6dd9ccf057113
|
tests/test_checker.py
|
tests/test_checker.py
|
import pytest
import os, stat
from botbot import checker, problems
def test_fastq_checker():
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM
assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checker.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
|
import pytest
import os, stat
from botbot import checker, problems
def test_fastq_checker_path_names():
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM
assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM
def test_fastq_checker_symlinks(tmpdir):
prev = tmpdir.chdir()
# Make a test file
p = tmpdir.join("bad.fastq")
p.write('')
os.symlink(p.basename, "good.fastq")
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.fastq") == problems.PROB_NO_PROBLEM
prev.chdir()
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checker.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
|
Add test for symlink detection
|
Add test for symlink detection
|
Python
|
mit
|
jackstanek/BotBot,jackstanek/BotBot
|
import pytest
import os, stat
from botbot import checker, problems
- def test_fastq_checker():
+ def test_fastq_checker_path_names():
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM
assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM
+
+ def test_fastq_checker_symlinks(tmpdir):
+ prev = tmpdir.chdir()
+
+ # Make a test file
+ p = tmpdir.join("bad.fastq")
+ p.write('')
+ os.symlink(p.basename, "good.fastq")
+
+ assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
+ assert checker.is_fastq("good.fastq") == problems.PROB_NO_PROBLEM
+ prev.chdir()
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checker.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
|
Add test for symlink detection
|
## Code Before:
import pytest
import os, stat
from botbot import checker, problems
def test_fastq_checker():
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM
assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checker.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
## Instruction:
Add test for symlink detection
## Code After:
import pytest
import os, stat
from botbot import checker, problems
def test_fastq_checker_path_names():
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.py") == problems.PROB_NO_PROBLEM
assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM
def test_fastq_checker_symlinks(tmpdir):
prev = tmpdir.chdir()
# Make a test file
p = tmpdir.join("bad.fastq")
p.write('')
os.symlink(p.basename, "good.fastq")
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.fastq") == problems.PROB_NO_PROBLEM
prev.chdir()
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checker.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
|
# ... existing code ...
def test_fastq_checker_path_names():
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
# ... modified code ...
assert checker.is_fastq("fastq.actually_ok_too") == problems.PROB_NO_PROBLEM
def test_fastq_checker_symlinks(tmpdir):
prev = tmpdir.chdir()
# Make a test file
p = tmpdir.join("bad.fastq")
p.write('')
os.symlink(p.basename, "good.fastq")
assert checker.is_fastq("bad.fastq") == problems.PROB_FILE_IS_FASTQ
assert checker.is_fastq("good.fastq") == problems.PROB_NO_PROBLEM
prev.chdir()
# ... rest of the code ...
|
c04b8932ec65480ba90dd4578d5f6bb8c3baa690
|
demosys/project/default.py
|
demosys/project/default.py
|
from demosys.project.base import BaseProject
from demosys.effects.registry import effects, parse_package_string
class Project(BaseProject):
"""
The project what will be assigned when no project are specified.
This is mainly used when the ``runeffect`` command is used
"""
def __init__(self, effect_package):
super().__init__()
self.path = effect_package
self.effect_package_name, self.effect_class_name = parse_package_string(effect_package)
self.effect_packages = [self.effect_package_name]
self.effect = None
def get_default_effect(self):
return self.effect
def create_resources(self):
pass
def create_effect_instances(self):
cls = effects.find_effect_class(self.path)
if not cls.runnable:
raise ValueError("Effect doesn't have the runnable flag set:", self.path)
self.effect = self.create_effect('default', cls.__name__)
|
from demosys.project.base import BaseProject
from demosys.effects.registry import effects, parse_package_string
class Project(BaseProject):
"""
The project what will be assigned when no project are specified.
This is mainly used when the ``runeffect`` command is used
"""
def __init__(self, effect_package):
super().__init__()
self.path = effect_package
self.effect_package_name, self.effect_class_name = parse_package_string(effect_package)
self.effect_packages = [self.effect_package_name]
self.effect = None
def get_default_effect(self):
return self.effect
def create_resources(self):
pass
def create_effect_instances(self):
if self.effect_class_name:
cls = effects.find_effect_class(self.path)
if not cls.runnable:
raise ValueError("Effect doesn't have the runnable flag set:", self.path)
else:
effect_package = effects.get_package(self.effect_package_name)
runnable_effects = effect_package.runnable_effects()
if not runnable_effects:
raise ValueError("No runnable effects found in effect package", self.effect_package_name)
cls = runnable_effects[-1]
self.effect = self.create_effect('default', cls.__name__)
|
Improve errors when effect packages or effects are not found
|
Improve errors when effect packages or effects are not found
|
Python
|
isc
|
Contraz/demosys-py
|
from demosys.project.base import BaseProject
from demosys.effects.registry import effects, parse_package_string
class Project(BaseProject):
"""
The project what will be assigned when no project are specified.
This is mainly used when the ``runeffect`` command is used
"""
def __init__(self, effect_package):
super().__init__()
self.path = effect_package
self.effect_package_name, self.effect_class_name = parse_package_string(effect_package)
self.effect_packages = [self.effect_package_name]
self.effect = None
def get_default_effect(self):
return self.effect
def create_resources(self):
pass
def create_effect_instances(self):
+ if self.effect_class_name:
- cls = effects.find_effect_class(self.path)
+ cls = effects.find_effect_class(self.path)
+ if not cls.runnable:
+ raise ValueError("Effect doesn't have the runnable flag set:", self.path)
+ else:
+ effect_package = effects.get_package(self.effect_package_name)
+ runnable_effects = effect_package.runnable_effects()
- if not cls.runnable:
+ if not runnable_effects:
- raise ValueError("Effect doesn't have the runnable flag set:", self.path)
+ raise ValueError("No runnable effects found in effect package", self.effect_package_name)
+
+ cls = runnable_effects[-1]
self.effect = self.create_effect('default', cls.__name__)
|
Improve errors when effect packages or effects are not found
|
## Code Before:
from demosys.project.base import BaseProject
from demosys.effects.registry import effects, parse_package_string
class Project(BaseProject):
"""
The project what will be assigned when no project are specified.
This is mainly used when the ``runeffect`` command is used
"""
def __init__(self, effect_package):
super().__init__()
self.path = effect_package
self.effect_package_name, self.effect_class_name = parse_package_string(effect_package)
self.effect_packages = [self.effect_package_name]
self.effect = None
def get_default_effect(self):
return self.effect
def create_resources(self):
pass
def create_effect_instances(self):
cls = effects.find_effect_class(self.path)
if not cls.runnable:
raise ValueError("Effect doesn't have the runnable flag set:", self.path)
self.effect = self.create_effect('default', cls.__name__)
## Instruction:
Improve errors when effect packages or effects are not found
## Code After:
from demosys.project.base import BaseProject
from demosys.effects.registry import effects, parse_package_string
class Project(BaseProject):
"""
The project what will be assigned when no project are specified.
This is mainly used when the ``runeffect`` command is used
"""
def __init__(self, effect_package):
super().__init__()
self.path = effect_package
self.effect_package_name, self.effect_class_name = parse_package_string(effect_package)
self.effect_packages = [self.effect_package_name]
self.effect = None
def get_default_effect(self):
return self.effect
def create_resources(self):
pass
def create_effect_instances(self):
if self.effect_class_name:
cls = effects.find_effect_class(self.path)
if not cls.runnable:
raise ValueError("Effect doesn't have the runnable flag set:", self.path)
else:
effect_package = effects.get_package(self.effect_package_name)
runnable_effects = effect_package.runnable_effects()
if not runnable_effects:
raise ValueError("No runnable effects found in effect package", self.effect_package_name)
cls = runnable_effects[-1]
self.effect = self.create_effect('default', cls.__name__)
|
...
def create_effect_instances(self):
if self.effect_class_name:
cls = effects.find_effect_class(self.path)
if not cls.runnable:
raise ValueError("Effect doesn't have the runnable flag set:", self.path)
else:
effect_package = effects.get_package(self.effect_package_name)
runnable_effects = effect_package.runnable_effects()
if not runnable_effects:
raise ValueError("No runnable effects found in effect package", self.effect_package_name)
cls = runnable_effects[-1]
...
|
0a7c6011607bccc61570c8f027c547425e8d53cf
|
iterm2_tools/tests/test_ipython.py
|
iterm2_tools/tests/test_ipython.py
|
from __future__ import print_function, division, absolute_import
import subprocess
import sys
import os
def test_IPython():
ipython = os.path.join(sys.prefix, 'bin', 'ipython')
if not os.path.exists(ipython):
raise Exception("IPython must be installed in %s to run the IPython tests" % os.path.join(sys.prefix, 'bin'))
commands = b"""\
1
raise Exception
undefined
def f():
pass
f()
"""
p = subprocess.Popen([ipython, '--quick', '--colors=LightBG', '--no-banner'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=commands)
assert (stdout, stderr) == (b'', b'')
|
from __future__ import print_function, division, absolute_import
import subprocess
import sys
import os
from IPython.testing.tools import get_ipython_cmd
def test_IPython():
ipython = get_ipython_cmd()
commands = b"""\
1
raise Exception
undefined
def f():
pass
f()
"""
p = subprocess.Popen([ipython, '--quick', '--colors=LightBG', '--no-banner'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=commands)
assert (stdout, stderr) == (b'', b'')
|
Use get_ipython_cmd in IPython test
|
Use get_ipython_cmd in IPython test
|
Python
|
mit
|
asmeurer/iterm2-tools
|
from __future__ import print_function, division, absolute_import
import subprocess
import sys
import os
+ from IPython.testing.tools import get_ipython_cmd
+
def test_IPython():
+ ipython = get_ipython_cmd()
- ipython = os.path.join(sys.prefix, 'bin', 'ipython')
- if not os.path.exists(ipython):
- raise Exception("IPython must be installed in %s to run the IPython tests" % os.path.join(sys.prefix, 'bin'))
-
commands = b"""\
1
raise Exception
undefined
def f():
pass
f()
"""
p = subprocess.Popen([ipython, '--quick', '--colors=LightBG', '--no-banner'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=commands)
assert (stdout, stderr) == (b'', b'')
|
Use get_ipython_cmd in IPython test
|
## Code Before:
from __future__ import print_function, division, absolute_import
import subprocess
import sys
import os
def test_IPython():
ipython = os.path.join(sys.prefix, 'bin', 'ipython')
if not os.path.exists(ipython):
raise Exception("IPython must be installed in %s to run the IPython tests" % os.path.join(sys.prefix, 'bin'))
commands = b"""\
1
raise Exception
undefined
def f():
pass
f()
"""
p = subprocess.Popen([ipython, '--quick', '--colors=LightBG', '--no-banner'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=commands)
assert (stdout, stderr) == (b'', b'')
## Instruction:
Use get_ipython_cmd in IPython test
## Code After:
from __future__ import print_function, division, absolute_import
import subprocess
import sys
import os
from IPython.testing.tools import get_ipython_cmd
def test_IPython():
ipython = get_ipython_cmd()
commands = b"""\
1
raise Exception
undefined
def f():
pass
f()
"""
p = subprocess.Popen([ipython, '--quick', '--colors=LightBG', '--no-banner'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate(input=commands)
assert (stdout, stderr) == (b'', b'')
|
...
from IPython.testing.tools import get_ipython_cmd
def test_IPython():
ipython = get_ipython_cmd()
...
|
55476a86ed482d2e1f473dc629848d2068225c73
|
keras/dtensor/__init__.py
|
keras/dtensor/__init__.py
|
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
# pylint: disable=g-direct-tensorflow-import, g-import-not-at-top
from tensorflow.dtensor import python as dtensor_api
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
Change keras to use dtensor public API.
|
Change keras to use dtensor public API.
PiperOrigin-RevId: 438605222
|
Python
|
apache-2.0
|
keras-team/keras,keras-team/keras
|
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
+ from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
- # pylint: disable=g-direct-tensorflow-import, g-import-not-at-top
- from tensorflow.dtensor import python as dtensor_api
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
-
-
|
Change keras to use dtensor public API.
|
## Code Before:
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
# pylint: disable=g-direct-tensorflow-import, g-import-not-at-top
from tensorflow.dtensor import python as dtensor_api
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
## Instruction:
Change keras to use dtensor public API.
## Code After:
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
# ... existing code ...
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# ... modified code ...
dtensor_api = None
# ... rest of the code ...
|
cbdcdf16285823a8e13a68c8e86d6957aa7aa6d8
|
kivy/tools/packaging/pyinstaller_hooks/pyi_rth_kivy.py
|
kivy/tools/packaging/pyinstaller_hooks/pyi_rth_kivy.py
|
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = '{};{}'.format(
sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins'))
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
|
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
|
Fix GST_PLUGIN_PATH in runtime hook
|
Fix GST_PLUGIN_PATH in runtime hook
- Only include `gst-plugins`
- Also, semicolon was only correct on Windows
|
Python
|
mit
|
inclement/kivy,inclement/kivy,kivy/kivy,kivy/kivy,akshayaurora/kivy,akshayaurora/kivy,kivy/kivy,matham/kivy,rnixx/kivy,matham/kivy,inclement/kivy,matham/kivy,matham/kivy,rnixx/kivy,akshayaurora/kivy,rnixx/kivy
|
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
+ os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
- os.environ['GST_PLUGIN_PATH'] = '{};{}'.format(
- sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins'))
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
|
Fix GST_PLUGIN_PATH in runtime hook
|
## Code Before:
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = '{};{}'.format(
sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins'))
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
## Instruction:
Fix GST_PLUGIN_PATH in runtime hook
## Code After:
import os
import sys
root = os.path.join(sys._MEIPASS, 'kivy_install')
os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data')
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
sys.path += [os.path.join(root, '_libs')]
if sys.platform == 'darwin':
sitepackages = os.path.join(sys._MEIPASS, 'sitepackages')
sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')]
os.putenv('GST_REGISTRY_FORK', 'no')
|
// ... existing code ...
os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules')
os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins')
os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin')
// ... rest of the code ...
|
3e1f5adf1402d6e9ddd4ef6a08f4a667be950e1d
|
src/ansible/admin.py
|
src/ansible/admin.py
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
admin.site.site_title = 'Ansible Admin'
admin.site.index_title = 'Admin Tool'
|
Add ansible app site title
|
Add ansible app site title
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
+ admin.site.site_title = 'Ansible Admin'
+ admin.site.index_title = 'Admin Tool'
|
Add ansible app site title
|
## Code Before:
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
## Instruction:
Add ansible app site title
## Code After:
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
admin.site.site_title = 'Ansible Admin'
admin.site.index_title = 'Admin Tool'
|
# ... existing code ...
admin.site.site_header = 'Ansible Admin'
admin.site.site_title = 'Ansible Admin'
admin.site.index_title = 'Admin Tool'
# ... rest of the code ...
|
fcee154a123c7f9db81c92efce6d4a425dc0a3b1
|
src/sentry/models/savedsearch.py
|
src/sentry/models/savedsearch.py
|
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
class SavedSearch(Model):
"""
A saved search query.
"""
__core__ = True
project = FlexibleForeignKey('sentry.Project')
name = models.CharField(max_length=128)
query = models.TextField()
date_added = models.DateTimeField(default=timezone.now)
is_default = models.BooleanField(default=False)
class Meta:
app_label = 'sentry'
db_table = 'sentry_savedsearch'
unique_together = (('project', 'name'),)
__repr__ = sane_repr('project_id', 'name')
class SavedSearchUserDefault(Model):
"""
Indicates the default saved search for a given user
"""
savedsearch = FlexibleForeignKey('sentry.SavedSearch')
project = FlexibleForeignKey('sentry.Project')
user = FlexibleForeignKey('sentry.User')
class Meta:
unique_together = (('project', 'user'),)
app_label = 'sentry'
db_table = 'sentry_savedsearch_userdefault'
|
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
class SavedSearch(Model):
"""
A saved search query.
"""
__core__ = True
project = FlexibleForeignKey('sentry.Project')
name = models.CharField(max_length=128)
query = models.TextField()
date_added = models.DateTimeField(default=timezone.now)
is_default = models.BooleanField(default=False)
class Meta:
app_label = 'sentry'
db_table = 'sentry_savedsearch'
unique_together = (('project', 'name'),)
__repr__ = sane_repr('project_id', 'name')
class SavedSearchUserDefault(Model):
"""
Indicates the default saved search for a given user
"""
__core__ = True
savedsearch = FlexibleForeignKey('sentry.SavedSearch')
project = FlexibleForeignKey('sentry.Project')
user = FlexibleForeignKey('sentry.User')
class Meta:
unique_together = (('project', 'user'),)
app_label = 'sentry'
db_table = 'sentry_savedsearch_userdefault'
|
Add SavedSearch to project defaults
|
Add SavedSearch to project defaults
|
Python
|
bsd-3-clause
|
mvaled/sentry,jean/sentry,mvaled/sentry,nicholasserra/sentry,BuildingLink/sentry,gencer/sentry,JackDanger/sentry,beeftornado/sentry,mvaled/sentry,zenefits/sentry,nicholasserra/sentry,daevaorn/sentry,gencer/sentry,beeftornado/sentry,fotinakis/sentry,JamesMura/sentry,JackDanger/sentry,fotinakis/sentry,looker/sentry,JackDanger/sentry,mvaled/sentry,gencer/sentry,gencer/sentry,JamesMura/sentry,ifduyue/sentry,alexm92/sentry,looker/sentry,jean/sentry,mitsuhiko/sentry,alexm92/sentry,ifduyue/sentry,mvaled/sentry,looker/sentry,BuildingLink/sentry,daevaorn/sentry,zenefits/sentry,mvaled/sentry,jean/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,ifduyue/sentry,daevaorn/sentry,jean/sentry,JamesMura/sentry,ifduyue/sentry,BuildingLink/sentry,daevaorn/sentry,fotinakis/sentry,nicholasserra/sentry,zenefits/sentry,JamesMura/sentry,BuildingLink/sentry,gencer/sentry,mitsuhiko/sentry,looker/sentry,ifduyue/sentry,zenefits/sentry,looker/sentry,jean/sentry,fotinakis/sentry,zenefits/sentry,alexm92/sentry
|
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
class SavedSearch(Model):
"""
A saved search query.
"""
__core__ = True
project = FlexibleForeignKey('sentry.Project')
name = models.CharField(max_length=128)
query = models.TextField()
date_added = models.DateTimeField(default=timezone.now)
is_default = models.BooleanField(default=False)
class Meta:
app_label = 'sentry'
db_table = 'sentry_savedsearch'
unique_together = (('project', 'name'),)
__repr__ = sane_repr('project_id', 'name')
class SavedSearchUserDefault(Model):
"""
Indicates the default saved search for a given user
"""
+ __core__ = True
+
savedsearch = FlexibleForeignKey('sentry.SavedSearch')
project = FlexibleForeignKey('sentry.Project')
user = FlexibleForeignKey('sentry.User')
class Meta:
unique_together = (('project', 'user'),)
app_label = 'sentry'
db_table = 'sentry_savedsearch_userdefault'
|
Add SavedSearch to project defaults
|
## Code Before:
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
class SavedSearch(Model):
"""
A saved search query.
"""
__core__ = True
project = FlexibleForeignKey('sentry.Project')
name = models.CharField(max_length=128)
query = models.TextField()
date_added = models.DateTimeField(default=timezone.now)
is_default = models.BooleanField(default=False)
class Meta:
app_label = 'sentry'
db_table = 'sentry_savedsearch'
unique_together = (('project', 'name'),)
__repr__ = sane_repr('project_id', 'name')
class SavedSearchUserDefault(Model):
"""
Indicates the default saved search for a given user
"""
savedsearch = FlexibleForeignKey('sentry.SavedSearch')
project = FlexibleForeignKey('sentry.Project')
user = FlexibleForeignKey('sentry.User')
class Meta:
unique_together = (('project', 'user'),)
app_label = 'sentry'
db_table = 'sentry_savedsearch_userdefault'
## Instruction:
Add SavedSearch to project defaults
## Code After:
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from sentry.db.models import FlexibleForeignKey, Model, sane_repr
class SavedSearch(Model):
"""
A saved search query.
"""
__core__ = True
project = FlexibleForeignKey('sentry.Project')
name = models.CharField(max_length=128)
query = models.TextField()
date_added = models.DateTimeField(default=timezone.now)
is_default = models.BooleanField(default=False)
class Meta:
app_label = 'sentry'
db_table = 'sentry_savedsearch'
unique_together = (('project', 'name'),)
__repr__ = sane_repr('project_id', 'name')
class SavedSearchUserDefault(Model):
"""
Indicates the default saved search for a given user
"""
__core__ = True
savedsearch = FlexibleForeignKey('sentry.SavedSearch')
project = FlexibleForeignKey('sentry.Project')
user = FlexibleForeignKey('sentry.User')
class Meta:
unique_together = (('project', 'user'),)
app_label = 'sentry'
db_table = 'sentry_savedsearch_userdefault'
|
// ... existing code ...
"""
__core__ = True
savedsearch = FlexibleForeignKey('sentry.SavedSearch')
// ... rest of the code ...
|
394ed06411d3ca3ada66aab3bee796682895acc0
|
cla_backend/apps/core/testing.py
|
cla_backend/apps/core/testing.py
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
|
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
- cursor.execute('CREATE EXTENSION pgcrypto')
+ cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
|
## Code Before:
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
## Instruction:
Install pgcrypto PGSQL extension but only if it does not exist already (e.g. from template1)
## Code After:
from django.core.management import call_command
from django.test.utils import get_runner
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
# use jenkins runner if present otherwise the default django one
if 'django_jenkins' in settings.INSTALLED_APPS:
base_runner = 'django_jenkins.runner.CITestSuiteRunner'
else:
base_runner = 'django.test.runner.DiscoverRunner'
class CLADiscoverRunner(get_runner(settings, base_runner)):
"""
Overrides the default Runner and loads the initial_groups fixture.
This is because migrations are switched off during testing but
we do need `initial_groups` in order for the tests to pass.
"""
def setup_databases(self, **kwargs):
ret = super(CLADiscoverRunner, self).setup_databases(**kwargs)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
return ret
|
# ... existing code ...
cursor = connection.cursor()
cursor.execute('CREATE EXTENSION IF NOT EXISTS pgcrypto')
call_command('loaddata', 'initial_groups')
# ... rest of the code ...
|
3ab25e30e26ba7edf2f732ff0d4fa42b1446f6dc
|
txampext/test/test_axiomtypes.py
|
txampext/test/test_axiomtypes.py
|
try:
from txampext import axiomtypes; axiomtypes
from axiom import attributes
except ImportError:
axiomtypes = None
from twisted.protocols import amp
from twisted.trial import unittest
class TypeForTests(unittest.TestCase):
skip = axiomtypes is None
def _test_typeFor(self, attr, expectedType, **kwargs):
asAMP = axiomtypes.typeFor(attr, **kwargs)
self.assertTrue(isinstance(asAMP, expectedType))
return asAMP
def test_optional(self):
asAMP = axiomtypes.typeFor(attributes.text(), optional=True)
self.assertTrue(asAMP.optional)
def test_text(self):
self._test_typeFor(attributes.text(), amp.Unicode)
def test_bytes(self):
self._test_typeFor(attributes.bytes(), amp.String)
def test_integer(self):
self._test_typeFor(attributes.integer(), amp.Integer)
def test_decimals(self):
for precision in range(1, 11):
attr = getattr(attributes, "point{}decimal".format(precision))
self._test_typeFor(attr(), amp.Decimal)
self._test_typeFor(attributes.money(), amp.Decimal)
def test_float(self):
self._test_typeFor(attributes.ieee754_double(), amp.Float)
def test_timestamp(self):
self._test_typeFor(attributes.timestamp(), amp.DateTime)
|
try:
from txampext import axiomtypes; axiomtypes
from axiom import attributes
except ImportError: # pragma: no cover
axiomtypes = None
from twisted.protocols import amp
from twisted.trial import unittest
class TypeForTests(unittest.TestCase):
skip = axiomtypes is None
def _test_typeFor(self, attr, expectedType, **kwargs):
asAMP = axiomtypes.typeFor(attr, **kwargs)
self.assertTrue(isinstance(asAMP, expectedType))
return asAMP
def test_optional(self):
asAMP = axiomtypes.typeFor(attributes.text(), optional=True)
self.assertTrue(asAMP.optional)
def test_text(self):
self._test_typeFor(attributes.text(), amp.Unicode)
def test_bytes(self):
self._test_typeFor(attributes.bytes(), amp.String)
def test_integer(self):
self._test_typeFor(attributes.integer(), amp.Integer)
def test_decimals(self):
for precision in range(1, 11):
attr = getattr(attributes, "point{}decimal".format(precision))
self._test_typeFor(attr(), amp.Decimal)
self._test_typeFor(attributes.money(), amp.Decimal)
def test_float(self):
self._test_typeFor(attributes.ieee754_double(), amp.Float)
def test_timestamp(self):
self._test_typeFor(attributes.timestamp(), amp.DateTime)
|
Add 'no cover' pragma to hide bogus missing code coverage
|
Add 'no cover' pragma to hide bogus missing code coverage
|
Python
|
isc
|
lvh/txampext
|
try:
from txampext import axiomtypes; axiomtypes
from axiom import attributes
- except ImportError:
+ except ImportError: # pragma: no cover
axiomtypes = None
from twisted.protocols import amp
from twisted.trial import unittest
class TypeForTests(unittest.TestCase):
skip = axiomtypes is None
def _test_typeFor(self, attr, expectedType, **kwargs):
asAMP = axiomtypes.typeFor(attr, **kwargs)
self.assertTrue(isinstance(asAMP, expectedType))
return asAMP
def test_optional(self):
asAMP = axiomtypes.typeFor(attributes.text(), optional=True)
self.assertTrue(asAMP.optional)
def test_text(self):
self._test_typeFor(attributes.text(), amp.Unicode)
def test_bytes(self):
self._test_typeFor(attributes.bytes(), amp.String)
def test_integer(self):
self._test_typeFor(attributes.integer(), amp.Integer)
def test_decimals(self):
for precision in range(1, 11):
attr = getattr(attributes, "point{}decimal".format(precision))
self._test_typeFor(attr(), amp.Decimal)
self._test_typeFor(attributes.money(), amp.Decimal)
def test_float(self):
self._test_typeFor(attributes.ieee754_double(), amp.Float)
def test_timestamp(self):
self._test_typeFor(attributes.timestamp(), amp.DateTime)
|
Add 'no cover' pragma to hide bogus missing code coverage
|
## Code Before:
try:
from txampext import axiomtypes; axiomtypes
from axiom import attributes
except ImportError:
axiomtypes = None
from twisted.protocols import amp
from twisted.trial import unittest
class TypeForTests(unittest.TestCase):
skip = axiomtypes is None
def _test_typeFor(self, attr, expectedType, **kwargs):
asAMP = axiomtypes.typeFor(attr, **kwargs)
self.assertTrue(isinstance(asAMP, expectedType))
return asAMP
def test_optional(self):
asAMP = axiomtypes.typeFor(attributes.text(), optional=True)
self.assertTrue(asAMP.optional)
def test_text(self):
self._test_typeFor(attributes.text(), amp.Unicode)
def test_bytes(self):
self._test_typeFor(attributes.bytes(), amp.String)
def test_integer(self):
self._test_typeFor(attributes.integer(), amp.Integer)
def test_decimals(self):
for precision in range(1, 11):
attr = getattr(attributes, "point{}decimal".format(precision))
self._test_typeFor(attr(), amp.Decimal)
self._test_typeFor(attributes.money(), amp.Decimal)
def test_float(self):
self._test_typeFor(attributes.ieee754_double(), amp.Float)
def test_timestamp(self):
self._test_typeFor(attributes.timestamp(), amp.DateTime)
## Instruction:
Add 'no cover' pragma to hide bogus missing code coverage
## Code After:
try:
from txampext import axiomtypes; axiomtypes
from axiom import attributes
except ImportError: # pragma: no cover
axiomtypes = None
from twisted.protocols import amp
from twisted.trial import unittest
class TypeForTests(unittest.TestCase):
skip = axiomtypes is None
def _test_typeFor(self, attr, expectedType, **kwargs):
asAMP = axiomtypes.typeFor(attr, **kwargs)
self.assertTrue(isinstance(asAMP, expectedType))
return asAMP
def test_optional(self):
asAMP = axiomtypes.typeFor(attributes.text(), optional=True)
self.assertTrue(asAMP.optional)
def test_text(self):
self._test_typeFor(attributes.text(), amp.Unicode)
def test_bytes(self):
self._test_typeFor(attributes.bytes(), amp.String)
def test_integer(self):
self._test_typeFor(attributes.integer(), amp.Integer)
def test_decimals(self):
for precision in range(1, 11):
attr = getattr(attributes, "point{}decimal".format(precision))
self._test_typeFor(attr(), amp.Decimal)
self._test_typeFor(attributes.money(), amp.Decimal)
def test_float(self):
self._test_typeFor(attributes.ieee754_double(), amp.Float)
def test_timestamp(self):
self._test_typeFor(attributes.timestamp(), amp.DateTime)
|
// ... existing code ...
from axiom import attributes
except ImportError: # pragma: no cover
axiomtypes = None
// ... rest of the code ...
|
1f3730ac4d531ca0d582a8b8bded871acb409847
|
backend/api-server/warehaus_api/events/models.py
|
backend/api-server/warehaus_api/events/models.py
|
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
|
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
user_id = user_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
|
Fix api-server events not saving the user ID
|
Fix api-server events not saving the user ID
|
Python
|
agpl-3.0
|
labsome/labsome,warehaus/warehaus,warehaus/warehaus,labsome/labsome,warehaus/warehaus,labsome/labsome
|
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
+ user_id = user_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
|
Fix api-server events not saving the user ID
|
## Code Before:
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
## Instruction:
Fix api-server events not saving the user ID
## Code After:
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
user_id = user_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
|
...
obj_id = obj_id,
user_id = user_id,
interested_ids = interested_ids,
...
|
c7d2e917df5e0c2182e351b5157271b6e62a06cd
|
app/soc/modules/gsoc/models/timeline.py
|
app/soc/modules/gsoc/models/timeline.py
|
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Application Review Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Student Application Matched Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Organizations Review Student Applications Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Students Matched to Mentors Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
Change verbiage on program profile info.
|
Change verbiage on program profile info.
Fixes issue 1601.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
- verbose_name=ugettext('Application Review Deadline'))
+ verbose_name=ugettext('Organizations Review Student Applications Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
- verbose_name=ugettext('Student Application Matched Deadline'))
+ verbose_name=ugettext('Students Matched to Mentors Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
Change verbiage on program profile info.
|
## Code Before:
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Application Review Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Student Application Matched Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
## Instruction:
Change verbiage on program profile info.
## Code After:
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Organizations Review Student Applications Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Students Matched to Mentors Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
# ... existing code ...
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Organizations Review Student Applications Deadline'))
# ... modified code ...
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Students Matched to Mentors Deadline'))
# ... rest of the code ...
|
ba1de19895b001069966a10d9c72b8485c4b4195
|
tests/testapp/views.py
|
tests/testapp/views.py
|
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404, render
from django.utils.html import format_html, mark_safe
from content_editor.contents import contents_for_mptt_item
from content_editor.renderer import PluginRenderer
from .models import Page, RichText, Image, Snippet, External
renderer = PluginRenderer()
renderer.register(
RichText,
lambda plugin: mark_safe(plugin.text),
)
renderer.register(
Image,
lambda plugin: format_html(
'<figure><img src="{}" alt=""/><figcaption>{}</figcaption></figure>',
plugin.image.url,
plugin.caption,
),
)
def page_detail(request, path=None):
page = get_object_or_404(
Page.objects.active(),
path='/{}/'.format(path) if path else '/',
)
page.activate_language(request)
contents = contents_for_mptt_item(
page,
[RichText, Image, Snippet, External],
)
return render(request, page.template.template_name, {
'page': page,
'content': {
region.key: renderer.render(contents[region.key])
for region in page.regions
},
})
|
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404, render
from django.utils.html import format_html
from content_editor.contents import contents_for_mptt_item
from content_editor.renderer import PluginRenderer
from feincms3 import plugins
from .models import Page, RichText, Image, Snippet, External
renderer = PluginRenderer()
renderer.register(
RichText,
plugins.render_richtext,
)
renderer.register(
Image,
lambda plugin: format_html(
'<figure><img src="{}" alt=""/><figcaption>{}</figcaption></figure>',
plugin.image.url,
plugin.caption,
),
)
def page_detail(request, path=None):
page = get_object_or_404(
Page.objects.active(),
path='/{}/'.format(path) if path else '/',
)
page.activate_language(request)
contents = contents_for_mptt_item(
page,
[RichText, Image, Snippet, External],
)
return render(request, page.template.template_name, {
'page': page,
'content': {
region.key: renderer.render(contents[region.key])
for region in page.regions
},
})
|
Use render_richtext in test suite
|
Use render_richtext in test suite
|
Python
|
bsd-3-clause
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404, render
- from django.utils.html import format_html, mark_safe
+ from django.utils.html import format_html
from content_editor.contents import contents_for_mptt_item
from content_editor.renderer import PluginRenderer
+
+ from feincms3 import plugins
from .models import Page, RichText, Image, Snippet, External
renderer = PluginRenderer()
renderer.register(
RichText,
- lambda plugin: mark_safe(plugin.text),
+ plugins.render_richtext,
)
renderer.register(
Image,
lambda plugin: format_html(
'<figure><img src="{}" alt=""/><figcaption>{}</figcaption></figure>',
plugin.image.url,
plugin.caption,
),
)
def page_detail(request, path=None):
page = get_object_or_404(
Page.objects.active(),
path='/{}/'.format(path) if path else '/',
)
page.activate_language(request)
contents = contents_for_mptt_item(
page,
[RichText, Image, Snippet, External],
)
return render(request, page.template.template_name, {
'page': page,
'content': {
region.key: renderer.render(contents[region.key])
for region in page.regions
},
})
|
Use render_richtext in test suite
|
## Code Before:
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404, render
from django.utils.html import format_html, mark_safe
from content_editor.contents import contents_for_mptt_item
from content_editor.renderer import PluginRenderer
from .models import Page, RichText, Image, Snippet, External
renderer = PluginRenderer()
renderer.register(
RichText,
lambda plugin: mark_safe(plugin.text),
)
renderer.register(
Image,
lambda plugin: format_html(
'<figure><img src="{}" alt=""/><figcaption>{}</figcaption></figure>',
plugin.image.url,
plugin.caption,
),
)
def page_detail(request, path=None):
page = get_object_or_404(
Page.objects.active(),
path='/{}/'.format(path) if path else '/',
)
page.activate_language(request)
contents = contents_for_mptt_item(
page,
[RichText, Image, Snippet, External],
)
return render(request, page.template.template_name, {
'page': page,
'content': {
region.key: renderer.render(contents[region.key])
for region in page.regions
},
})
## Instruction:
Use render_richtext in test suite
## Code After:
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404, render
from django.utils.html import format_html
from content_editor.contents import contents_for_mptt_item
from content_editor.renderer import PluginRenderer
from feincms3 import plugins
from .models import Page, RichText, Image, Snippet, External
renderer = PluginRenderer()
renderer.register(
RichText,
plugins.render_richtext,
)
renderer.register(
Image,
lambda plugin: format_html(
'<figure><img src="{}" alt=""/><figcaption>{}</figcaption></figure>',
plugin.image.url,
plugin.caption,
),
)
def page_detail(request, path=None):
page = get_object_or_404(
Page.objects.active(),
path='/{}/'.format(path) if path else '/',
)
page.activate_language(request)
contents = contents_for_mptt_item(
page,
[RichText, Image, Snippet, External],
)
return render(request, page.template.template_name, {
'page': page,
'content': {
region.key: renderer.render(contents[region.key])
for region in page.regions
},
})
|
...
from django.shortcuts import get_object_or_404, render
from django.utils.html import format_html
...
from content_editor.renderer import PluginRenderer
from feincms3 import plugins
...
RichText,
plugins.render_richtext,
)
...
|
c326becad43949999d151cd1e10fcb75f9d2b148
|
lib/constants.py
|
lib/constants.py
|
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
JSON_PUBSUB_PORT = 15596
|
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
|
Add missing constant for ssl listener.
|
Add missing constant for ssl listener.
|
Python
|
apache-2.0
|
MediaMath/qasino,MediaMath/qasino
|
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
+ HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
|
Add missing constant for ssl listener.
|
## Code Before:
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
JSON_PUBSUB_PORT = 15596
## Instruction:
Add missing constant for ssl listener.
## Code After:
SQL_PORT = 15000
JSON_RPC_PORT = 15598
HTTP_PORT = 15597
HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
|
# ... existing code ...
HTTP_PORT = 15597
HTTPS_PORT = 443
JSON_PUBSUB_PORT = 15596
# ... rest of the code ...
|
73c7161d4414a9259ee6123ee3d3540153f30b9e
|
purchase_edi_file/models/purchase_order_line.py
|
purchase_edi_file/models/purchase_order_line.py
|
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
seller = product._select_seller(partner_id=partner)
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
|
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
seller = product._select_seller(
partner_id=partner, quantity=line.product_uom_qty
)
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
|
Add qty when searching seller because even if not passed a verification is made by default in _select_seller
|
Add qty when searching seller because even if not passed a verification is made by default in _select_seller
|
Python
|
agpl-3.0
|
akretion/ak-odoo-incubator,akretion/ak-odoo-incubator,akretion/ak-odoo-incubator,akretion/ak-odoo-incubator
|
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
- seller = product._select_seller(partner_id=partner)
+ seller = product._select_seller(
+ partner_id=partner, quantity=line.product_uom_qty
+ )
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
|
Add qty when searching seller because even if not passed a verification is made by default in _select_seller
|
## Code Before:
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
seller = product._select_seller(partner_id=partner)
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
## Instruction:
Add qty when searching seller because even if not passed a verification is made by default in _select_seller
## Code After:
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
seller = product._select_seller(
partner_id=partner, quantity=line.product_uom_qty
)
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
|
// ... existing code ...
product = line.product_id
seller = product._select_seller(
partner_id=partner, quantity=line.product_uom_qty
)
purchase_edi = seller.purchase_edi_id
// ... rest of the code ...
|
bc6e6f0faec8405849c896b0661c181e9853359d
|
match/management/commands/import-users.py
|
match/management/commands/import-users.py
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
Delete users before importing them
|
Delete users before importing them
|
Python
|
mit
|
maxf/address-matcher,maxf/address-matcher,maxf/address-matcher,maxf/address-matcher
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
+ User.objects.all().delete()
+
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
Delete users before importing them
|
## Code Before:
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
## Instruction:
Delete users before importing them
## Code After:
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
...
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
...
|
72941398fd2e78cbf5d994b4bf8683c4bdefaab9
|
utils/travis_runner.py
|
utils/travis_runner.py
|
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"tutorial_semipar_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
|
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
# if __name__ == "__main__":
# os.chdir("promotion/grmpy_tutorial_notebook")
# cmd = [
# "jupyter",
# "nbconvert",
# "--execute",
# "tutorial_semipar_notebook.ipynb",
# "--ExecutePreprocessor.timeout=-1",
# ]
# subprocess.check_call(cmd)
|
Comment out semipar notebook in travis runner until pip build us updated.
|
Comment out semipar notebook in travis runner until pip build us updated.
|
Python
|
mit
|
grmToolbox/grmpy
|
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
- if __name__ == "__main__":
+ # if __name__ == "__main__":
- os.chdir("promotion/grmpy_tutorial_notebook")
+ # os.chdir("promotion/grmpy_tutorial_notebook")
- cmd = [
+ # cmd = [
- "jupyter",
+ # "jupyter",
- "nbconvert",
+ # "nbconvert",
- "--execute",
+ # "--execute",
- "tutorial_semipar_notebook.ipynb",
+ # "tutorial_semipar_notebook.ipynb",
- "--ExecutePreprocessor.timeout=-1",
+ # "--ExecutePreprocessor.timeout=-1",
- ]
+ # ]
- subprocess.check_call(cmd)
+ # subprocess.check_call(cmd)
|
Comment out semipar notebook in travis runner until pip build us updated.
|
## Code Before:
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"tutorial_semipar_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
## Instruction:
Comment out semipar notebook in travis runner until pip build us updated.
## Code After:
"""This script manages all tasks for the TRAVIS build server."""
import os
import subprocess
if __name__ == "__main__":
os.chdir("promotion/grmpy_tutorial_notebook")
cmd = [
"jupyter",
"nbconvert",
"--execute",
"grmpy_tutorial_notebook.ipynb",
"--ExecutePreprocessor.timeout=-1",
]
subprocess.check_call(cmd)
os.chdir("../..")
# if __name__ == "__main__":
# os.chdir("promotion/grmpy_tutorial_notebook")
# cmd = [
# "jupyter",
# "nbconvert",
# "--execute",
# "tutorial_semipar_notebook.ipynb",
# "--ExecutePreprocessor.timeout=-1",
# ]
# subprocess.check_call(cmd)
|
// ... existing code ...
# if __name__ == "__main__":
# os.chdir("promotion/grmpy_tutorial_notebook")
# cmd = [
# "jupyter",
# "nbconvert",
# "--execute",
# "tutorial_semipar_notebook.ipynb",
# "--ExecutePreprocessor.timeout=-1",
# ]
# subprocess.check_call(cmd)
// ... rest of the code ...
|
517c8978c33d7e9f0251985f2ca39b6f2514ae9e
|
hack/boxee/skin/boxee/720p/scripts/boxeehack_clear_cache.py
|
hack/boxee/skin/boxee/720p/scripts/boxeehack_clear_cache.py
|
import os,mc
import xbmc, xbmcgui
def fanart_function():
if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"):
pass
def thumbnail_function():
if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"):
os.system("rm /data/etc/.fanart")
os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm")
mc.ShowDialogNotification("Clearing thumbnail cache")
if (__name__ == "__main__"):
section = sys.argv[1]
if section == "fanart":
fanart_function()
if section == "thumbnail":
thumbnail_function()
|
import os,mc
import xbmc, xbmcgui
def fanart_function():
if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"):
pass
def thumbnail_function():
if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"):
os.system("rm %s" % xbmc.translatePath('special://profile/.fanart'))
os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm")
mc.ShowDialogNotification("Clearing thumbnail cache")
if (__name__ == "__main__"):
section = sys.argv[1]
if section == "fanart":
fanart_function()
if section == "thumbnail":
thumbnail_function()
|
Correct clearing of fanart cache
|
Correct clearing of fanart cache
|
Python
|
mit
|
cigamit/boxeehack,cigamit/boxeehack,vLBrian/boxeehack-cigamit,vLBrian/boxeehack-cigamit
|
import os,mc
import xbmc, xbmcgui
def fanart_function():
if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"):
pass
def thumbnail_function():
if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"):
- os.system("rm /data/etc/.fanart")
+ os.system("rm %s" % xbmc.translatePath('special://profile/.fanart'))
os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm")
mc.ShowDialogNotification("Clearing thumbnail cache")
if (__name__ == "__main__"):
section = sys.argv[1]
if section == "fanart":
fanart_function()
if section == "thumbnail":
thumbnail_function()
|
Correct clearing of fanart cache
|
## Code Before:
import os,mc
import xbmc, xbmcgui
def fanart_function():
if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"):
pass
def thumbnail_function():
if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"):
os.system("rm /data/etc/.fanart")
os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm")
mc.ShowDialogNotification("Clearing thumbnail cache")
if (__name__ == "__main__"):
section = sys.argv[1]
if section == "fanart":
fanart_function()
if section == "thumbnail":
thumbnail_function()
## Instruction:
Correct clearing of fanart cache
## Code After:
import os,mc
import xbmc, xbmcgui
def fanart_function():
if mc.ShowDialogConfirm("Clear fanart cache", "Are you sure you want to clear the fanart cache?", "Cancel", "OK"):
pass
def thumbnail_function():
if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"):
os.system("rm %s" % xbmc.translatePath('special://profile/.fanart'))
os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm")
mc.ShowDialogNotification("Clearing thumbnail cache")
if (__name__ == "__main__"):
section = sys.argv[1]
if section == "fanart":
fanart_function()
if section == "thumbnail":
thumbnail_function()
|
# ... existing code ...
if mc.ShowDialogConfirm("Clear thumbnail cache", "Are you sure you want to clear the thumbnail cache?", "Cancel", "OK"):
os.system("rm %s" % xbmc.translatePath('special://profile/.fanart'))
os.system("find /data/.boxee/UserData/profiles/*/Thumbnails/ -name \*.tbn | xargs rm")
# ... rest of the code ...
|
a3a34026369391837d31d7424e78de207b98340d
|
preferences/views.py
|
preferences/views.py
|
from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = Person.objects.filter(memberships__organization__name='Florida Senate')
representatives = Person.objects.filter(memberships__organization__name='Florida House of Representatives')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representatives': representatives}
)
|
from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from tot.utils import get_current_people
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = get_current_people(position='senator')
representatives = get_current_people(position='representatives')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representatives': representatives}
)
|
Use new util function for getting current people
|
Use new
util function for getting current people
|
Python
|
mit
|
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
|
from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
+
+ from tot.utils import get_current_people
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
- senators = Person.objects.filter(memberships__organization__name='Florida Senate')
- representatives = Person.objects.filter(memberships__organization__name='Florida House of Representatives')
+ senators = get_current_people(position='senator')
+ representatives = get_current_people(position='representatives')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representatives': representatives}
)
|
Use new util function for getting current people
|
## Code Before:
from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = Person.objects.filter(memberships__organization__name='Florida Senate')
representatives = Person.objects.filter(memberships__organization__name='Florida House of Representatives')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representatives': representatives}
)
## Instruction:
Use new util function for getting current people
## Code After:
from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from tot.utils import get_current_people
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = get_current_people(position='senator')
representatives = get_current_people(position='representatives')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representatives': representatives}
)
|
# ... existing code ...
# from django.views.generic import TemplateView
from tot.utils import get_current_people
# ... modified code ...
senators = get_current_people(position='senator')
representatives = get_current_people(position='representatives')
# ... rest of the code ...
|
b5e13cb92f539545873d59553c03e1523eac1dbb
|
recipes/kaleido-core/run_test.py
|
recipes/kaleido-core/run_test.py
|
from subprocess import Popen, PIPE
import json
import platform
# Remove "sys.exit" after feedstock creation when running
# on linux-anvil-cos7-x86_64 image
if platform.system() == "Linux":
import sys
sys.exit(0)
if platform.system() == "Windows":
ext = ".cmd"
else:
ext = ""
p = Popen(
['kaleido' + ext, "plotly", "--disable-gpu"],
stdout=PIPE, stdin=PIPE, stderr=PIPE,
text=True
)
stdout_data = p.communicate(
input=json.dumps({"data": {"data": []}, "format": "png"})
)[0]
assert "iVBORw" in stdout_data
|
from subprocess import Popen, PIPE
import json
import platform
# Remove "sys.exit" after feedstock creation when running
# on linux-anvil-cos7-x86_64 image
if platform.system() == "Linux":
import sys
sys.exit(0)
if platform.system() == "Windows":
ext = ".cmd"
else:
ext = ""
p = Popen(
['kaleido' + ext, "plotly", "--disable-gpu", "--no-sandbox", "--disable-breakpad"],
stdout=PIPE, stdin=PIPE, stderr=PIPE,
text=True
)
stdout_data = p.communicate(
input=json.dumps({"data": {"data": []}, "format": "png"})
)[0]
assert "iVBORw" in stdout_data
|
Fix hanging test on MacOS
|
Fix hanging test on MacOS
|
Python
|
bsd-3-clause
|
kwilcox/staged-recipes,ocefpaf/staged-recipes,stuertz/staged-recipes,johanneskoester/staged-recipes,jochym/staged-recipes,SylvainCorlay/staged-recipes,goanpeca/staged-recipes,igortg/staged-recipes,mariusvniekerk/staged-recipes,conda-forge/staged-recipes,patricksnape/staged-recipes,goanpeca/staged-recipes,johanneskoester/staged-recipes,scopatz/staged-recipes,ocefpaf/staged-recipes,ReimarBauer/staged-recipes,scopatz/staged-recipes,hadim/staged-recipes,hadim/staged-recipes,jochym/staged-recipes,patricksnape/staged-recipes,mariusvniekerk/staged-recipes,conda-forge/staged-recipes,stuertz/staged-recipes,jakirkham/staged-recipes,ReimarBauer/staged-recipes,igortg/staged-recipes,jakirkham/staged-recipes,SylvainCorlay/staged-recipes,kwilcox/staged-recipes
|
from subprocess import Popen, PIPE
import json
import platform
# Remove "sys.exit" after feedstock creation when running
# on linux-anvil-cos7-x86_64 image
if platform.system() == "Linux":
import sys
sys.exit(0)
if platform.system() == "Windows":
ext = ".cmd"
else:
ext = ""
p = Popen(
- ['kaleido' + ext, "plotly", "--disable-gpu"],
+ ['kaleido' + ext, "plotly", "--disable-gpu", "--no-sandbox", "--disable-breakpad"],
stdout=PIPE, stdin=PIPE, stderr=PIPE,
text=True
)
stdout_data = p.communicate(
input=json.dumps({"data": {"data": []}, "format": "png"})
)[0]
assert "iVBORw" in stdout_data
|
Fix hanging test on MacOS
|
## Code Before:
from subprocess import Popen, PIPE
import json
import platform
# Remove "sys.exit" after feedstock creation when running
# on linux-anvil-cos7-x86_64 image
if platform.system() == "Linux":
import sys
sys.exit(0)
if platform.system() == "Windows":
ext = ".cmd"
else:
ext = ""
p = Popen(
['kaleido' + ext, "plotly", "--disable-gpu"],
stdout=PIPE, stdin=PIPE, stderr=PIPE,
text=True
)
stdout_data = p.communicate(
input=json.dumps({"data": {"data": []}, "format": "png"})
)[0]
assert "iVBORw" in stdout_data
## Instruction:
Fix hanging test on MacOS
## Code After:
from subprocess import Popen, PIPE
import json
import platform
# Remove "sys.exit" after feedstock creation when running
# on linux-anvil-cos7-x86_64 image
if platform.system() == "Linux":
import sys
sys.exit(0)
if platform.system() == "Windows":
ext = ".cmd"
else:
ext = ""
p = Popen(
['kaleido' + ext, "plotly", "--disable-gpu", "--no-sandbox", "--disable-breakpad"],
stdout=PIPE, stdin=PIPE, stderr=PIPE,
text=True
)
stdout_data = p.communicate(
input=json.dumps({"data": {"data": []}, "format": "png"})
)[0]
assert "iVBORw" in stdout_data
|
# ... existing code ...
p = Popen(
['kaleido' + ext, "plotly", "--disable-gpu", "--no-sandbox", "--disable-breakpad"],
stdout=PIPE, stdin=PIPE, stderr=PIPE,
# ... rest of the code ...
|
52d32849f4cd38ca7a0fcfc0418e9e9580dd426a
|
kimochiconsumer/views.py
|
kimochiconsumer/views.py
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
Use the gallery_image method for required information
|
Use the gallery_image method for required information
|
Python
|
mit
|
matslindh/kimochi-consumer
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
- data = request.kimochi.gallery(request.matchdict['gallery_id'])
+ data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
Use the gallery_image method for required information
|
## Code Before:
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
## Instruction:
Use the gallery_image method for required information
## Code After:
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
...
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
...
|
313aee17c8e2e1c86b96b40017ac4618c66df463
|
__init__.py
|
__init__.py
|
ENTITIES_INDEX = ['men', 'foy']
# Some variables needed by the test case plugins
CURRENCY = u"DT"
# Some variables needed by the test case graph widget
# REVENUES_CATEGORIES
XAXIS_PROPERTIES = { 'sali': {
'name' : 'sal',
'typ_tot' : {'salsuperbrut' : 'Salaire super brut',
'salbrut': 'Salaire brut',
'sal': 'Salaire imposable',
'salnet': 'Salaire net'},
'typ_tot_default' : 'sal'},
}
# Some variables used by other plugins
|
ENTITIES_INDEX = ['men', 'foy']
# Some variables needed by the test case plugins
CURRENCY = u"DT"
# Some variables needed by the test case graph widget
REVENUES_CATEGORIES = {'imposable' : ['sal',]}
XAXIS_PROPERTIES = { 'sali': {
'name' : 'sal',
'typ_tot' : {'salsuperbrut' : 'Salaire super brut',
'salbrut': 'Salaire brut',
'sal': 'Salaire imposable',
'salnet': 'Salaire net'},
'typ_tot_default' : 'sal'},
}
# Some variables used by other plugins
|
Generalize graph and some new example scripts
|
Generalize graph and some new example scripts
|
Python
|
agpl-3.0
|
openfisca/openfisca-tunisia,openfisca/openfisca-tunisia
|
ENTITIES_INDEX = ['men', 'foy']
# Some variables needed by the test case plugins
CURRENCY = u"DT"
# Some variables needed by the test case graph widget
- # REVENUES_CATEGORIES
+
+
+ REVENUES_CATEGORIES = {'imposable' : ['sal',]}
+
XAXIS_PROPERTIES = { 'sali': {
'name' : 'sal',
'typ_tot' : {'salsuperbrut' : 'Salaire super brut',
'salbrut': 'Salaire brut',
'sal': 'Salaire imposable',
'salnet': 'Salaire net'},
'typ_tot_default' : 'sal'},
}
# Some variables used by other plugins
|
Generalize graph and some new example scripts
|
## Code Before:
ENTITIES_INDEX = ['men', 'foy']
# Some variables needed by the test case plugins
CURRENCY = u"DT"
# Some variables needed by the test case graph widget
# REVENUES_CATEGORIES
XAXIS_PROPERTIES = { 'sali': {
'name' : 'sal',
'typ_tot' : {'salsuperbrut' : 'Salaire super brut',
'salbrut': 'Salaire brut',
'sal': 'Salaire imposable',
'salnet': 'Salaire net'},
'typ_tot_default' : 'sal'},
}
# Some variables used by other plugins
## Instruction:
Generalize graph and some new example scripts
## Code After:
ENTITIES_INDEX = ['men', 'foy']
# Some variables needed by the test case plugins
CURRENCY = u"DT"
# Some variables needed by the test case graph widget
REVENUES_CATEGORIES = {'imposable' : ['sal',]}
XAXIS_PROPERTIES = { 'sali': {
'name' : 'sal',
'typ_tot' : {'salsuperbrut' : 'Salaire super brut',
'salbrut': 'Salaire brut',
'sal': 'Salaire imposable',
'salnet': 'Salaire net'},
'typ_tot_default' : 'sal'},
}
# Some variables used by other plugins
|
...
# Some variables needed by the test case graph widget
REVENUES_CATEGORIES = {'imposable' : ['sal',]}
...
|
1b7509d8bd624bbf33352f622d8c03be6f3e35f2
|
src/sentry/api/serializers/models/organization_member.py
|
src/sentry/api/serializers/models/organization_member.py
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
}
return d
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
|
Add avatarUrl to team member serializers
|
Add avatarUrl to team member serializers
Conflicts:
src/sentry/api/serializers/models/organization_member.py
src/sentry/api/serializers/models/release.py
cherry-pick 8ee1bee748ae7f51987ea8ec5ee10795b656cfd9
|
Python
|
bsd-3-clause
|
jean/sentry,gencer/sentry,looker/sentry,ngonzalvez/sentry,gg7/sentry,mvaled/sentry,nicholasserra/sentry,wong2/sentry,beeftornado/sentry,JamesMura/sentry,alexm92/sentry,JamesMura/sentry,korealerts1/sentry,wujuguang/sentry,BayanGroup/sentry,imankulov/sentry,fotinakis/sentry,JTCunning/sentry,kevinlondon/sentry,jean/sentry,gencer/sentry,hongliang5623/sentry,TedaLIEz/sentry,looker/sentry,pauloschilling/sentry,llonchj/sentry,llonchj/sentry,hongliang5623/sentry,Natim/sentry,wong2/sentry,BuildingLink/sentry,fuziontech/sentry,daevaorn/sentry,jokey2k/sentry,argonemyth/sentry,zenefits/sentry,nicholasserra/sentry,daevaorn/sentry,mvaled/sentry,nicholasserra/sentry,ifduyue/sentry,ngonzalvez/sentry,vperron/sentry,ifduyue/sentry,JTCunning/sentry,gencer/sentry,beeftornado/sentry,gg7/sentry,ewdurbin/sentry,mvaled/sentry,fotinakis/sentry,JTCunning/sentry,BayanGroup/sentry,vperron/sentry,drcapulet/sentry,felixbuenemann/sentry,zenefits/sentry,camilonova/sentry,korealerts1/sentry,JackDanger/sentry,kevinastone/sentry,fuziontech/sentry,kevinlondon/sentry,BuildingLink/sentry,alexm92/sentry,kevinlondon/sentry,korealerts1/sentry,drcapulet/sentry,wong2/sentry,pauloschilling/sentry,JamesMura/sentry,kevinastone/sentry,JackDanger/sentry,looker/sentry,daevaorn/sentry,Kryz/sentry,jean/sentry,felixbuenemann/sentry,looker/sentry,Natim/sentry,1tush/sentry,TedaLIEz/sentry,beeftornado/sentry,JamesMura/sentry,ifduyue/sentry,JamesMura/sentry,felixbuenemann/sentry,mvaled/sentry,boneyao/sentry,zenefits/sentry,looker/sentry,Kryz/sentry,zenefits/sentry,argonemyth/sentry,jokey2k/sentry,camilonova/sentry,Natim/sentry,daevaorn/sentry,jean/sentry,mvaled/sentry,wujuguang/sentry,BuildingLink/sentry,wujuguang/sentry,imankulov/sentry,ewdurbin/sentry,fotinakis/sentry,argonemyth/sentry,hongliang5623/sentry,ifduyue/sentry,llonchj/sentry,drcapulet/sentry,1tush/sentry,ewdurbin/sentry,songyi199111/sentry,gg7/sentry,fotinakis/sentry,songyi199111/sentry,Kryz/sentry,boneyao/sentry,jean/sentry,1tush/sentry,jokey2k/sentry,vperr
on/sentry,fuziontech/sentry,zenefits/sentry,TedaLIEz/sentry,gencer/sentry,kevinastone/sentry,songyi199111/sentry,boneyao/sentry,JackDanger/sentry,mitsuhiko/sentry,BuildingLink/sentry,alexm92/sentry,ngonzalvez/sentry,mvaled/sentry,pauloschilling/sentry,mitsuhiko/sentry,BayanGroup/sentry,imankulov/sentry,ifduyue/sentry,camilonova/sentry,BuildingLink/sentry,gencer/sentry
|
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
+ from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
+ 'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
|
Add avatarUrl to team member serializers
|
## Code Before:
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
}
return d
## Instruction:
Add avatarUrl to team member serializers
## Code After:
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
|
// ... existing code ...
from sentry.models import OrganizationMember
from sentry.utils.avatar import get_gravatar_url
// ... modified code ...
'dateCreated': obj.date_added,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
// ... rest of the code ...
|
20224e4fe8b93dee087dd7a455f9709b9795a026
|
app/models.py
|
app/models.py
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
Make title unique Talk property
|
Make title unique Talk property
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
- title = database.Column(database.String(128), nullable=False)
+ title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
Make title unique Talk property
|
## Code Before:
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
## Instruction:
Make title unique Talk property
## Code After:
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
# ... existing code ...
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
# ... rest of the code ...
|
17c90fd954441c2623495e50a2f89790e1ff5489
|
projects/tests/test_tools.py
|
projects/tests/test_tools.py
|
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.assertIsNone(self.mixin.check_can_access(
MagicMock(user=self.user),
))
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
with self.assertRaises(PermissionDenied):
self.mixin.check_can_access(MagicMock(user=self.user))
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
import sure
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.mixin.check_can_access(
MagicMock(user=self.user),
).should.be.none
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
self.mixin.check_can_access.when\
.called_with(MagicMock(user=self.user))\
.should.throw(PermissionDenied)
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
Use sure in project tools cases
|
Use sure in project tools cases
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
+ import sure
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
- self.assertIsNone(self.mixin.check_can_access(
+ self.mixin.check_can_access(
MagicMock(user=self.user),
- ))
+ ).should.be.none
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
- with self.assertRaises(PermissionDenied):
- self.mixin.check_can_access(MagicMock(user=self.user))
+ self.mixin.check_can_access.when\
+ .called_with(MagicMock(user=self.user))\
+ .should.throw(PermissionDenied)
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
Use sure in project tools cases
|
## Code Before:
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.assertIsNone(self.mixin.check_can_access(
MagicMock(user=self.user),
))
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
with self.assertRaises(PermissionDenied):
self.mixin.check_can_access(MagicMock(user=self.user))
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
## Instruction:
Use sure in project tools cases
## Code After:
import sure
from mock import MagicMock
from django.core.exceptions import PermissionDenied
from django.test import TestCase
from accounts.tests.factories import UserFactory
from ..utils import ProjectAccessMixin
from ..models import Project
from . import factories
class ProjectAccessMixinCase(TestCase):
"""Project access mixin case"""
def setUp(self):
self._orig_can_access = Project.can_access
Project.can_access = MagicMock()
self._orig_update = Project.objects.update_user_projects
Project.objects.update_user_projects = MagicMock()
self.mixin = ProjectAccessMixin()
self.project = factories.ProjectFactory()
self.mixin.get_project = MagicMock(return_value=self.project)
self.user = UserFactory()
def tearDown(self):
Project.can_access = self._orig_can_access
Project.objects.update_user_projects = self._orig_update
def test_can_access(self):
"""Test can access"""
Project.can_access.return_value = True
self.mixin.check_can_access(
MagicMock(user=self.user),
).should.be.none
def test_call_update_if_organization(self):
"""Test call update if organization"""
Project.can_access.return_value = False
self.mixin.check_can_access.when\
.called_with(MagicMock(user=self.user))\
.should.throw(PermissionDenied)
Project.objects.update_user_projects.asset_called_once_with(
self.user,
)
|
# ... existing code ...
import sure
from mock import MagicMock
# ... modified code ...
Project.can_access.return_value = True
self.mixin.check_can_access(
MagicMock(user=self.user),
).should.be.none
...
Project.can_access.return_value = False
self.mixin.check_can_access.when\
.called_with(MagicMock(user=self.user))\
.should.throw(PermissionDenied)
Project.objects.update_user_projects.asset_called_once_with(
# ... rest of the code ...
|
0da4c663e8a48bb759a140ca304ce35d3a8b5dcf
|
pyconde/events/templatetags/event_tags.py
|
pyconde/events/templatetags/event_tags.py
|
import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
|
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
|
Remove future-restriction on list_events tag
|
Remove future-restriction on list_events tag
|
Python
|
bsd-3-clause
|
zerok/pyconde-website-mirror,EuroPython/djep,EuroPython/djep,EuroPython/djep,pysv/djep,pysv/djep,EuroPython/djep,zerok/pyconde-website-mirror,pysv/djep,pysv/djep,pysv/djep,zerok/pyconde-website-mirror
|
- import datetime
-
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
- def list_events(number_of_events=3):
+ def list_events(number_of_events=None):
- now = datetime.datetime.now()
- events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
+ events = models.Event.objects.all()
+ if number_of_events is not None:
+ events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
|
Remove future-restriction on list_events tag
|
## Code Before:
import datetime
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=3):
now = datetime.datetime.now()
events = models.Event.objects.filter(date__gte=now).all()[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
## Instruction:
Remove future-restriction on list_events tag
## Code After:
from django import template
from .. import models
register = template.Library()
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
for evt in events:
if evt.end_date:
has_range = True
break
return {
'events': events,
'has_range': has_range
}
|
// ... existing code ...
from django import template
// ... modified code ...
@register.inclusion_tag('events/tags/list_events.html')
def list_events(number_of_events=None):
events = models.Event.objects.all()
if number_of_events is not None:
events = events[:number_of_events]
has_range = False
// ... rest of the code ...
|
94aec9e4a6501e875dbd6b59df57598f742a82da
|
ca_on_niagara/people.py
|
ca_on_niagara/people.py
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * yses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
Add comments about new file
|
ca_on_niagara: Add comments about new file
|
Python
|
mit
|
opencivicdata/scrapers-ca,opencivicdata/scrapers-ca
|
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
+ # The new data file:
+ # * has underscores in headers
+ # * yses "District_ID" instead of "District name"
+ # * prefixes "District_ID" with "Niagara Region - "
+ # https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
Add comments about new file
|
## Code Before:
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
## Instruction:
Add comments about new file
## Code After:
from __future__ import unicode_literals
from utils import CSVScraper
COUNCIL_PAGE = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * yses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
many_posts_per_area = True
|
# ... existing code ...
class NiagaraPersonScraper(CSVScraper):
# The new data file:
# * has underscores in headers
# * yses "District_ID" instead of "District name"
# * prefixes "District_ID" with "Niagara Region - "
# https://www.niagaraopendata.ca//dataset/ee767222-c7fc-4541-8cad-a27276a3522b/resource/af5621ad-c2e4-4569-803f-4aadca4173be/download/councilelectedofficials.csv
csv_url = 'http://www.niagararegion.ca/test/sherpa-list-to-csv.aspx?list=council-elected-officials-csv'
# ... rest of the code ...
|
299fadcde71558bc1e77ba396cc544619373c2b1
|
conditional/blueprints/spring_evals.py
|
conditional/blueprints/spring_evals.py
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
Add social events to spring evals 😿
|
Add social events to spring evals 😿
|
Python
|
mit
|
RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,ComputerScienceHouse/conditional,RamZallan/conditional,ComputerScienceHouse/conditional
|
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
+ 'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
+ 'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
Add social events to spring evals 😿
|
## Code Before:
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
## Instruction:
Add social events to spring evals 😿
## Code After:
from flask import Blueprint
from flask import render_template
from flask import request
spring_evals_bp = Blueprint('spring_evals_bp', __name__)
@spring_evals_bp.route('/spring_evals/')
def display_spring_evals():
# get user data
user_name = request.headers.get('x-webauth-user')
members = [
{
'name': "Liam Middlebrook",
'committee_meetings': 24,
'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}],
'major_project': 'open_container',
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
'result': 'Pending'
},
{
'name': "Julien Eid",
'committee_meetings': 69,
'house_meetings_missed': [],
'major_project': 'wii-u shit',
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
'result': 'Passed'
}
]
# return names in 'first last (username)' format
return render_template('spring_evals.html',
username = user_name,
members = members)
|
// ... existing code ...
'major_project_passed': True,
'social_events': "",
'comments': "please don't fail me",
// ... modified code ...
'major_project_passed': True,
'social_events': "Manipulation and Opportunism",
'comments': "imdabes",
// ... rest of the code ...
|
916b86865acf0297293e4a13f1da6838f9b2711f
|
scripts/lib/errors.py
|
scripts/lib/errors.py
|
""" Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
from lib.config import emergency_id
from lib.commands import vk, api
class ErrorManager:
""" Упрощенное оповещение об ошибках
str name: название скрипта (обычно укороченное)
Использование: with ErrorManager(name): main()
"""
def __init__(self, name):
self.name = name
def __enter__(self):
pass
def __exit__(self, *args):
if args[0] is not None:
sendErrorMessage(self.name)
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
if error is not None:
error_info = format_exception(type(error), error, error.__traceback__)
return "".join(error_info)
else:
return format_exc()
|
""" Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
from contextlib import contextmanager
from lib.config import emergency_id
from lib.commands import vk, api
@contextmanager
def ErrorManager(name):
""" Упрощенное оповещение об ошибках
str name: название скрипта (обычно укороченное)
Использование: with ErrorManager(name): main()
"""
try:
yield
except Exception as e:
sendErrorMessage(name)
raise e
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
if error is not None:
error_info = format_exception(type(error), error, error.__traceback__)
return "".join(error_info)
else:
return format_exc()
|
Change error class to function
|
Change error class to function
|
Python
|
mit
|
Varabe/Guild-Manager
|
""" Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
+ from contextlib import contextmanager
from lib.config import emergency_id
from lib.commands import vk, api
- class ErrorManager:
+ @contextmanager
+ def ErrorManager(name):
""" Упрощенное оповещение об ошибках
- str name: название скрипта (обычно укороченное)
+ str name: название скрипта (обычно укороченное)
- Использование: with ErrorManager(name): main()
+ Использование: with ErrorManager(name): main()
"""
+ try:
+ yield
+ except Exception as e:
- def __init__(self, name):
- self.name = name
-
- def __enter__(self):
- pass
-
- def __exit__(self, *args):
- if args[0] is not None:
- sendErrorMessage(self.name)
+ sendErrorMessage(name)
+ raise e
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
if error is not None:
error_info = format_exception(type(error), error, error.__traceback__)
return "".join(error_info)
else:
return format_exc()
|
Change error class to function
|
## Code Before:
""" Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
from lib.config import emergency_id
from lib.commands import vk, api
class ErrorManager:
""" Упрощенное оповещение об ошибках
str name: название скрипта (обычно укороченное)
Использование: with ErrorManager(name): main()
"""
def __init__(self, name):
self.name = name
def __enter__(self):
pass
def __exit__(self, *args):
if args[0] is not None:
sendErrorMessage(self.name)
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
if error is not None:
error_info = format_exception(type(error), error, error.__traceback__)
return "".join(error_info)
else:
return format_exc()
## Instruction:
Change error class to function
## Code After:
""" Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
from contextlib import contextmanager
from lib.config import emergency_id
from lib.commands import vk, api
@contextmanager
def ErrorManager(name):
""" Упрощенное оповещение об ошибках
str name: название скрипта (обычно укороченное)
Использование: with ErrorManager(name): main()
"""
try:
yield
except Exception as e:
sendErrorMessage(name)
raise e
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
if error is not None:
error_info = format_exception(type(error), error, error.__traceback__)
return "".join(error_info)
else:
return format_exc()
|
// ... existing code ...
from traceback import format_exception, format_exc
from contextlib import contextmanager
from lib.config import emergency_id
// ... modified code ...
@contextmanager
def ErrorManager(name):
""" Упрощенное оповещение об ошибках
...
str name: название скрипта (обычно укороченное)
Использование: with ErrorManager(name): main()
"""
try:
yield
except Exception as e:
sendErrorMessage(name)
raise e
// ... rest of the code ...
|
e189844bd6179d49665deb1c9ef56206213fc800
|
hungry/__init__.py
|
hungry/__init__.py
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
Fix bug: Did not catch all exceptions
|
Fix bug: Did not catch all exceptions
|
Python
|
mit
|
denizdogan/hungry
|
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
- # catch all exceptions
+ # default to catching any exception
- if ex == ():
+ exceptions = ex or Exception
+
+ # catch any exception in `exceptions`
- try:
+ try:
- return func(*args, **kw)
+ return func(*args, **kw)
- except Exception as e:
+ except exceptions as e:
- return caught_it(e)
+ return caught_it(e)
- # catch only exceptions in `ex`
- else:
- try:
- return func(*args, **kw)
- except ex as e:
- return caught_it(e)
return wrapper
return inner
|
Fix bug: Did not catch all exceptions
|
## Code Before:
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# catch all exceptions
if ex == ():
try:
return func(*args, **kw)
except Exception as e:
return caught_it(e)
# catch only exceptions in `ex`
else:
try:
return func(*args, **kw)
except ex as e:
return caught_it(e)
return wrapper
return inner
## Instruction:
Fix bug: Did not catch all exceptions
## Code After:
__version__ = '0.0.5'
def eat(*ex, **kwargs):
error_handler = kwargs.get('error_handler', None)
error_value = kwargs.get('error_value', None)
def inner(func):
def wrapper(*args, **kw):
def caught_it(e):
"""
Calls the error handler or returns the error value.
:param e: The caught exception.
:return: The error value or the result of the error handler.
"""
if error_handler is not None:
return error_handler(e, *args, **kw)
return error_value
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
return wrapper
return inner
|
# ... existing code ...
# default to catching any exception
exceptions = ex or Exception
# catch any exception in `exceptions`
try:
return func(*args, **kw)
except exceptions as e:
return caught_it(e)
# ... rest of the code ...
|
8a9a0f1dc277d26767ffc3f34c00c18d00bd5e2e
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "[email protected]"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "[email protected]"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
Fix missing generator on Windows
|
Fix missing generator on Windows
- Since compiler is not listed on settings, CMake is not
able to detect a valid generator, using MingGW by default
- Add topics to be used as tags for searching
Signed-off-by: Uilian Ries <[email protected]>
|
Python
|
mit
|
ColinH/PEGTL,ColinH/PEGTL
|
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
+ topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "[email protected]"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
+ settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
Fix missing generator on Windows
|
## Code Before:
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
url = homepage
license = "MIT"
author = "[email protected]"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
## Instruction:
Fix missing generator on Windows
## Code After:
from conans import ConanFile, CMake
class PEGTLConan(ConanFile):
name = "pegtl"
description = "C++ header-only parser combinator library for creating PEG parsers"
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
license = "MIT"
author = "[email protected]"
exports = "LICENSE"
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
no_copy_source = True
def build(self):
pass
def package(self):
cmake = CMake(self)
cmake.definitions["PEGTL_BUILD_TESTS"] = "OFF"
cmake.definitions["PEGTL_BUILD_EXAMPLES"] = "OFF"
cmake.definitions["PEGTL_INSTALL_DOC_DIR"] = "licenses"
cmake.configure()
cmake.install()
def package_id(self):
self.info.header_only()
|
...
homepage = "https://github.com/taocpp/PEGTL"
topics = ("conan", "taocpp", "pegtl", "peg", "grammar", "parsing")
url = homepage
...
exports_sources = "include/*", "CMakeLists.txt"
settings = "compiler"
generators = "cmake"
...
|
31eae0aee3a6ae9fa7abea312ff1ea843a98e853
|
graphene/contrib/django/tests/models.py
|
graphene/contrib/django/tests/models.py
|
from __future__ import absolute_import
from django.db import models
class Pet(models.Model):
name = models.CharField(max_length=30)
class Film(models.Model):
reporters = models.ManyToManyField('Reporter',
related_name='films')
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
pets = models.ManyToManyField('self')
def __str__(self): # __unicode__ on Python 2
return "%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, related_name='articles')
def __str__(self): # __unicode__ on Python 2
return self.headline
class Meta:
ordering = ('headline',)
|
from __future__ import absolute_import
from django.db import models
class Pet(models.Model):
name = models.CharField(max_length=30)
class Film(models.Model):
reporters = models.ManyToManyField('Reporter',
related_name='films')
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
pets = models.ManyToManyField('self')
def __str__(self): # __unicode__ on Python 2
return "%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, related_name='articles')
lang = models.CharField(max_length=2, help_text='Language', choices=[
('es', 'Spanish'),
('en', 'English')
], default='es')
def __str__(self): # __unicode__ on Python 2
return self.headline
class Meta:
ordering = ('headline',)
|
Improve Django field conversion real-life tests
|
Improve Django field conversion real-life tests
|
Python
|
mit
|
graphql-python/graphene,sjhewitt/graphene,Globegitter/graphene,sjhewitt/graphene,Globegitter/graphene,graphql-python/graphene
|
from __future__ import absolute_import
from django.db import models
class Pet(models.Model):
name = models.CharField(max_length=30)
class Film(models.Model):
reporters = models.ManyToManyField('Reporter',
related_name='films')
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
pets = models.ManyToManyField('self')
def __str__(self): # __unicode__ on Python 2
return "%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, related_name='articles')
+ lang = models.CharField(max_length=2, help_text='Language', choices=[
+ ('es', 'Spanish'),
+ ('en', 'English')
+ ], default='es')
def __str__(self): # __unicode__ on Python 2
return self.headline
class Meta:
ordering = ('headline',)
|
Improve Django field conversion real-life tests
|
## Code Before:
from __future__ import absolute_import
from django.db import models
class Pet(models.Model):
name = models.CharField(max_length=30)
class Film(models.Model):
reporters = models.ManyToManyField('Reporter',
related_name='films')
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
pets = models.ManyToManyField('self')
def __str__(self): # __unicode__ on Python 2
return "%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, related_name='articles')
def __str__(self): # __unicode__ on Python 2
return self.headline
class Meta:
ordering = ('headline',)
## Instruction:
Improve Django field conversion real-life tests
## Code After:
from __future__ import absolute_import
from django.db import models
class Pet(models.Model):
name = models.CharField(max_length=30)
class Film(models.Model):
reporters = models.ManyToManyField('Reporter',
related_name='films')
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
pets = models.ManyToManyField('self')
def __str__(self): # __unicode__ on Python 2
return "%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, related_name='articles')
lang = models.CharField(max_length=2, help_text='Language', choices=[
('es', 'Spanish'),
('en', 'English')
], default='es')
def __str__(self): # __unicode__ on Python 2
return self.headline
class Meta:
ordering = ('headline',)
|
// ... existing code ...
reporter = models.ForeignKey(Reporter, related_name='articles')
lang = models.CharField(max_length=2, help_text='Language', choices=[
('es', 'Spanish'),
('en', 'English')
], default='es')
// ... rest of the code ...
|
2d5c5a1bf693f428b53f8d4a6e788f7be864aa9e
|
image_site_app/forms.py
|
image_site_app/forms.py
|
from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
|
from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
|
Add placeholder to first_name and last_name fields in signup form
|
Add placeholder to first_name and last_name fields in signup form
|
Python
|
mit
|
frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq
|
from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
- first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
- last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
+ first_name = forms.CharField(max_length=30,
+ label='First name (optional)',
+ required=False,
+ widget=forms.TextInput(attrs={
+ 'placeholder': 'First name'
+ }))
+
+ last_name = forms.CharField(max_length=30,
+ label='Last name (optional)',
+ required=False,
+ widget=forms.TextInput(attrs={
+ 'placeholder': 'Last name'
+ }))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
|
Add placeholder to first_name and last_name fields in signup form
|
## Code Before:
from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
## Instruction:
Add placeholder to first_name and last_name fields in signup form
## Code After:
from django import forms
class SignupForm(forms.Form):
field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
def signup(self, request, user):
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
|
# ... existing code ...
first_name = forms.CharField(max_length=30,
label='First name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'First name'
}))
last_name = forms.CharField(max_length=30,
label='Last name (optional)',
required=False,
widget=forms.TextInput(attrs={
'placeholder': 'Last name'
}))
# ... rest of the code ...
|
78585c783013c6f06f7e20eee6a654759b70e99c
|
tests/test_ttfmt.py
|
tests/test_ttfmt.py
|
import unittest
class TestTtFmt(unittest.TestCase):
def testName(self):
pass
if __name__ == "__main__":
unittest.main()
|
import unittest
import tt.fmttools.ttfmt as ttfmt
class TestTtFmt(unittest.TestCase):
def test_get_vars(self):
data_provider = {
# Simple test cases
"F = A and B" : ["F", "A", "B"],
"F = A and B or C" : ["F", "A", "B", "C"],
}
for eq in data_provider:
self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq))
if __name__ == "__main__":
unittest.main()
|
Add basic tests for ttfmt get_vars method
|
Add basic tests for ttfmt get_vars method
|
Python
|
mit
|
welchbj/tt,welchbj/tt,welchbj/tt
|
import unittest
+
+ import tt.fmttools.ttfmt as ttfmt
class TestTtFmt(unittest.TestCase):
- def testName(self):
+ def test_get_vars(self):
+ data_provider = {
+ # Simple test cases
+ "F = A and B" : ["F", "A", "B"],
+ "F = A and B or C" : ["F", "A", "B", "C"],
+ }
- pass
+
+ for eq in data_provider:
+ self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq))
if __name__ == "__main__":
unittest.main()
|
Add basic tests for ttfmt get_vars method
|
## Code Before:
import unittest
class TestTtFmt(unittest.TestCase):
def testName(self):
pass
if __name__ == "__main__":
unittest.main()
## Instruction:
Add basic tests for ttfmt get_vars method
## Code After:
import unittest
import tt.fmttools.ttfmt as ttfmt
class TestTtFmt(unittest.TestCase):
def test_get_vars(self):
data_provider = {
# Simple test cases
"F = A and B" : ["F", "A", "B"],
"F = A and B or C" : ["F", "A", "B", "C"],
}
for eq in data_provider:
self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq))
if __name__ == "__main__":
unittest.main()
|
// ... existing code ...
import unittest
import tt.fmttools.ttfmt as ttfmt
// ... modified code ...
def test_get_vars(self):
data_provider = {
# Simple test cases
"F = A and B" : ["F", "A", "B"],
"F = A and B or C" : ["F", "A", "B", "C"],
}
for eq in data_provider:
self.assertListEqual(data_provider[eq], ttfmt.get_vars(eq))
// ... rest of the code ...
|
896b385f983ecf939bdc2ea938b9949fdc3fdbb8
|
colorise/color_tools.py
|
colorise/color_tools.py
|
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
|
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
|
Remove unused color distance function
|
Remove unused color distance function
|
Python
|
bsd-3-clause
|
MisanthropicBit/colorise
|
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
- def color_distance(rgb1, rgb2):
- """Compute the Euclidian distance between two colors."""
- r1, g1, b1 = rgb1
- r2, g2, b2 = rgb2
-
- return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
-
-
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
|
Remove unused color distance function
|
## Code Before:
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def color_distance(rgb1, rgb2):
"""Compute the Euclidian distance between two colors."""
r1, g1, b1 = rgb1
r2, g2, b2 = rgb2
return math.sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
## Instruction:
Remove unused color distance function
## Code After:
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator
def hls_to_rgb(hue, lightness, saturation):
"""Convert HLS (hue, lightness, saturation) values to RGB."""
return tuple(int(math.ceil(c * 255.))
for c in colorsys.hls_to_rgb(hue, lightness, saturation))
def hsv_to_rgb(hue, saturation, value):
"""Convert HSV (hue, saturation, value) values to RGB."""
return tuple(int(c * 255.)
for c in colorsys.hsv_to_rgb(hue/360.,
saturation/100.,
value/100.))
def color_difference(rgb1, rgb2):
"""Return the sums of component differences between two colors."""
return sum(abs(i - j) for i, j in zip(rgb1, rgb2))
def closest_color(rgb, clut):
"""Return the CLUT index of the closest RGB color to a given RGB tuple."""
# Generate a list of tuples of CLUT indices and the color difference value
indexed_diffs = ((idx, color_difference(rgb, clut[idx])) for idx in clut)
return min(indexed_diffs, key=operator.itemgetter(1))[0]
|
// ... existing code ...
def closest_color(rgb, clut):
// ... rest of the code ...
|
2f039066530533b1a8ae82076ed745c1f2e03688
|
app-tasks/rf/src/rf/uploads/geotiff/create_images.py
|
app-tasks/rf/src/rf/uploads/geotiff/create_images.py
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
Add ability to define band create function for geotiff images
|
Add ability to define band create function for geotiff images
This commit makes defining bands for custom geotiffs more flexible by
allowing passing custom functions for defining bands for different
datasources or other variables - subsequent commits for MODIS take
advantage of this
|
Python
|
apache-2.0
|
raster-foundry/raster-foundry,aaronxsu/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,aaronxsu/raster-foundry,aaronxsu/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,raster-foundry/raster-foundry,raster-foundry/raster-foundry,aaronxsu/raster-foundry
|
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
- owner=None):
+ owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
+ band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
- create_geotiff_bands(tif_path),
+ band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
Add ability to define band create function for geotiff images
|
## Code Before:
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
create_geotiff_bands(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
## Instruction:
Add ability to define band create function for geotiff images
## Code After:
import os
from rf.models import Image
from rf.utils.io import Visibility
from .io import get_geotiff_size_bytes, get_geotiff_resolution
from .create_bands import create_geotiff_bands
def create_geotiff_image(organizationId, tif_path, sourceuri, filename=None,
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
Args:
orgnizationId (str): UUID of organization that this image belongs to
tif_path (str): Local path to tif file
sourceuri (str): remote source of image
visibility (str): accessibility level for object
imageMetadata (dict): Optional dict of metadata about the image
scene (Scene): Optional Scene object holding this image
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
filename = filename if filename else os.path.basename(tif_path)
return Image(
organizationId,
get_geotiff_size_bytes(tif_path),
visibility,
filename,
sourceuri,
band_create_function(tif_path),
imageMetadata,
# TIFFs can have a different resolution in the X and Y directions, that is, pixels can be
# rectangular with respect to the ground. The RF API doesn't currently support this, so just
# select the X resolution.
get_geotiff_resolution(tif_path)[0],
[],
scene=scene,
owner=owner
)
|
...
visibility=Visibility.PRIVATE, imageMetadata={}, scene=None,
owner=None, band_create_function=create_geotiff_bands):
"""Create an Image object from a GeoTIFF.
...
owner (str): Optional owner of an image
band_create_function (function): function to aid in creating bands for a geotiff
"""
...
sourceuri,
band_create_function(tif_path),
imageMetadata,
...
|
6eea9e787107a83be36b03d93cddfe7fdf1e9e05
|
tools/skp/page_sets/skia_amazon_desktop.py
|
tools/skp/page_sets/skia_amazon_desktop.py
|
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
|
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(15)
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
|
Add wait to amazon page set to avoid tab crashes
|
Add wait to amazon page set to avoid tab crashes
BUG=skia:3049
TBR=borenet
NOTRY=true
Review URL: https://codereview.chromium.org/686133002
|
Python
|
bsd-3-clause
|
HalCanary/skia-hc,samuelig/skia,noselhq/skia,spezi77/android_external_skia,w3nd1go/android_external_skia,geekboxzone/mmallow_external_skia,YUPlayGodDev/platform_external_skia,AOSP-YU/platform_external_skia,PAC-ROM/android_external_skia,Infinitive-OS/platform_external_skia,UBERMALLOW/external_skia,scroggo/skia,AOSPB/external_skia,chenlian2015/skia_from_google,rubenvb/skia,shahrzadmn/skia,VRToxin-AOSP/android_external_skia,PAC-ROM/android_external_skia,TeamTwisted/external_skia,w3nd1go/android_external_skia,google/skia,PAC-ROM/android_external_skia,jtg-gg/skia,vanish87/skia,qrealka/skia-hc,shahrzadmn/skia,AOSP-YU/platform_external_skia,DiamondLovesYou/skia-sys,noselhq/skia,vanish87/skia,Hikari-no-Tenshi/android_external_skia,TeamTwisted/external_skia,ominux/skia,TeamExodus/external_skia,DiamondLovesYou/skia-sys,geekboxzone/mmallow_external_skia,HalCanary/skia-hc,amyvmiwei/skia,Hikari-no-Tenshi/android_external_skia,PAC-ROM/android_external_skia,vanish87/skia,UBERMALLOW/external_skia,qrealka/skia-hc,todotodoo/skia,rubenvb/skia,MinimalOS-AOSP/platform_external_skia,OneRom/external_skia,boulzordev/android_external_skia,UBERMALLOW/external_skia,nfxosp/platform_external_skia,tmpvar/skia.cc,noselhq/skia,Igalia/skia,HalCanary/skia-hc,pcwalton/skia,nfxosp/platform_external_skia,BrokenROM/external_skia,samuelig/skia,tmpvar/skia.cc,Jichao/skia,rubenvb/skia,nvoron23/skia,YUPlayGodDev/platform_external_skia,AOSP-YU/platform_external_skia,timduru/platform-external-skia,jtg-gg/skia,DiamondLovesYou/skia-sys,amyvmiwei/skia,AOSPB/external_skia,jtg-gg/skia,AOSPB/external_skia,todotodoo/skia,geekboxzone/mmallow_external_skia,nvoron23/skia,pcwalton/skia,nfxosp/platform_external_skia,TeamExodus/external_skia,TeamTwisted/external_skia,timduru/platform-external-skia,TeamExodus/external_skia,todotodoo/skia,pcwalton/skia,BrokenROM/external_skia,Infinitive-OS/platform_external_skia,pcwalton/skia,shahrzadmn/skia,VRToxin-AOSP/android_external_skia,scroggo/skia,nfxosp/platform_external_skia,Monke
yZZZZ/platform_external_skia,geekboxzone/mmallow_external_skia,Igalia/skia,qrealka/skia-hc,TeamExodus/external_skia,rubenvb/skia,samuelig/skia,samuelig/skia,YUPlayGodDev/platform_external_skia,vanish87/skia,tmpvar/skia.cc,HalCanary/skia-hc,TeamExodus/external_skia,nvoron23/skia,BrokenROM/external_skia,ominux/skia,ominux/skia,BrokenROM/external_skia,Hikari-no-Tenshi/android_external_skia,DiamondLovesYou/skia-sys,google/skia,nvoron23/skia,MinimalOS-AOSP/platform_external_skia,amyvmiwei/skia,w3nd1go/android_external_skia,boulzordev/android_external_skia,tmpvar/skia.cc,qrealka/skia-hc,qrealka/skia-hc,shahrzadmn/skia,MonkeyZZZZ/platform_external_skia,rubenvb/skia,boulzordev/android_external_skia,spezi77/android_external_skia,invisiblek/android_external_skia,ominux/skia,MonkeyZZZZ/platform_external_skia,MarshedOut/android_external_skia,invisiblek/android_external_skia,google/skia,HalCanary/skia-hc,MarshedOut/android_external_skia,rubenvb/skia,invisiblek/android_external_skia,noselhq/skia,aosp-mirror/platform_external_skia,nvoron23/skia,Igalia/skia,Jichao/skia,qrealka/skia-hc,HalCanary/skia-hc,tmpvar/skia.cc,AOSP-YU/platform_external_skia,AOSP-YU/platform_external_skia,Infinitive-OS/platform_external_skia,amyvmiwei/skia,w3nd1go/android_external_skia,shahrzadmn/skia,Infinitive-OS/platform_external_skia,chenlian2015/skia_from_google,todotodoo/skia,nfxosp/platform_external_skia,AOSP-YU/platform_external_skia,OneRom/external_skia,rubenvb/skia,nfxosp/platform_external_skia,geekboxzone/mmallow_external_skia,DiamondLovesYou/skia-sys,boulzordev/android_external_skia,YUPlayGodDev/platform_external_skia,samuelig/skia,invisiblek/android_external_skia,tmpvar/skia.cc,google/skia,chenlian2015/skia_from_google,timduru/platform-external-skia,MinimalOS-AOSP/platform_external_skia,UBERMALLOW/external_skia,MinimalOS-AOSP/platform_external_skia,UBERMALLOW/external_skia,chenlian2015/skia_from_google,OneRom/external_skia,OneRom/external_skia,TeamTwisted/external_skia,UBERMALLOW/external_skia,Mi
nimalOS-AOSP/platform_external_skia,AOSPB/external_skia,YUPlayGodDev/platform_external_skia,MonkeyZZZZ/platform_external_skia,BrokenROM/external_skia,geekboxzone/mmallow_external_skia,TeamExodus/external_skia,google/skia,OneRom/external_skia,MarshedOut/android_external_skia,VRToxin-AOSP/android_external_skia,MarshedOut/android_external_skia,VRToxin-AOSP/android_external_skia,MinimalOS-AOSP/platform_external_skia,PAC-ROM/android_external_skia,pcwalton/skia,timduru/platform-external-skia,vanish87/skia,Igalia/skia,scroggo/skia,noselhq/skia,HalCanary/skia-hc,amyvmiwei/skia,invisiblek/android_external_skia,HalCanary/skia-hc,AOSP-YU/platform_external_skia,boulzordev/android_external_skia,aosp-mirror/platform_external_skia,vanish87/skia,ominux/skia,MinimalOS-AOSP/platform_external_skia,Infinitive-OS/platform_external_skia,scroggo/skia,TeamTwisted/external_skia,jtg-gg/skia,MarshedOut/android_external_skia,boulzordev/android_external_skia,nfxosp/platform_external_skia,Hikari-no-Tenshi/android_external_skia,ominux/skia,w3nd1go/android_external_skia,w3nd1go/android_external_skia,aosp-mirror/platform_external_skia,vanish87/skia,TeamExodus/external_skia,google/skia,YUPlayGodDev/platform_external_skia,chenlian2015/skia_from_google,MarshedOut/android_external_skia,samuelig/skia,boulzordev/android_external_skia,noselhq/skia,BrokenROM/external_skia,Jichao/skia,VRToxin-AOSP/android_external_skia,nfxosp/platform_external_skia,w3nd1go/android_external_skia,todotodoo/skia,UBERMALLOW/external_skia,Infinitive-OS/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,DiamondLovesYou/skia-sys,google/skia,google/skia,shahrzadmn/skia,Igalia/skia,MonkeyZZZZ/platform_external_skia,Hikari-no-Tenshi/android_external_skia,tmpvar/skia.cc,boulzordev/android_external_skia,UBERMALLOW/external_skia,AOSPB/external_skia,timduru/platform-external-skia,shahrzadmn/skia,DiamondLovesYou/skia-sys,MonkeyZZZZ/platform_external_skia,VRToxin-AOSP/android_external_skia,todotodo
o/skia,TeamTwisted/external_skia,VRToxin-AOSP/android_external_skia,UBERMALLOW/external_skia,Igalia/skia,google/skia,nvoron23/skia,PAC-ROM/android_external_skia,spezi77/android_external_skia,invisiblek/android_external_skia,scroggo/skia,google/skia,scroggo/skia,scroggo/skia,noselhq/skia,OneRom/external_skia,invisiblek/android_external_skia,invisiblek/android_external_skia,nvoron23/skia,TeamTwisted/external_skia,PAC-ROM/android_external_skia,OneRom/external_skia,YUPlayGodDev/platform_external_skia,YUPlayGodDev/platform_external_skia,jtg-gg/skia,jtg-gg/skia,pcwalton/skia,MarshedOut/android_external_skia,Jichao/skia,TeamTwisted/external_skia,samuelig/skia,Infinitive-OS/platform_external_skia,geekboxzone/mmallow_external_skia,Jichao/skia,HalCanary/skia-hc,BrokenROM/external_skia,VRToxin-AOSP/android_external_skia,rubenvb/skia,Jichao/skia,jtg-gg/skia,Hikari-no-Tenshi/android_external_skia,Jichao/skia,pcwalton/skia,noselhq/skia,TeamTwisted/external_skia,vanish87/skia,BrokenROM/external_skia,MarshedOut/android_external_skia,timduru/platform-external-skia,qrealka/skia-hc,VRToxin-AOSP/android_external_skia,spezi77/android_external_skia,MarshedOut/android_external_skia,nvoron23/skia,AOSPB/external_skia,ominux/skia,aosp-mirror/platform_external_skia,scroggo/skia,todotodoo/skia,spezi77/android_external_skia,Hikari-no-Tenshi/android_external_skia,PAC-ROM/android_external_skia,aosp-mirror/platform_external_skia,todotodoo/skia,MonkeyZZZZ/platform_external_skia,samuelig/skia,rubenvb/skia,aosp-mirror/platform_external_skia,MonkeyZZZZ/platform_external_skia,YUPlayGodDev/platform_external_skia,amyvmiwei/skia,aosp-mirror/platform_external_skia,ominux/skia,pcwalton/skia,Igalia/skia,Hikari-no-Tenshi/android_external_skia,amyvmiwei/skia,Infinitive-OS/platform_external_skia,shahrzadmn/skia,tmpvar/skia.cc,vanish87/skia,w3nd1go/android_external_skia,MinimalOS-AOSP/platform_external_skia,todotodoo/skia,geekboxzone/mmallow_external_skia,MinimalOS-AOSP/platform_external_skia,boulzordev/android_
external_skia,ominux/skia,pcwalton/skia,Jichao/skia,Igalia/skia,tmpvar/skia.cc,TeamExodus/external_skia,Infinitive-OS/platform_external_skia,HalCanary/skia-hc,noselhq/skia,TeamExodus/external_skia,AOSPB/external_skia,AOSPB/external_skia,spezi77/android_external_skia,shahrzadmn/skia,AOSPB/external_skia,nfxosp/platform_external_skia,OneRom/external_skia,chenlian2015/skia_from_google,aosp-mirror/platform_external_skia,Jichao/skia,rubenvb/skia,qrealka/skia-hc,amyvmiwei/skia,OneRom/external_skia,chenlian2015/skia_from_google,MonkeyZZZZ/platform_external_skia,PAC-ROM/android_external_skia,AOSP-YU/platform_external_skia,nvoron23/skia,timduru/platform-external-skia,AOSP-YU/platform_external_skia,geekboxzone/mmallow_external_skia,w3nd1go/android_external_skia
|
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
+
+ def RunNavigateSteps(self, action_runner):
+ action_runner.NavigateToPage(self)
+ action_runner.Wait(15)
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
|
Add wait to amazon page set to avoid tab crashes
|
## Code Before:
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
## Instruction:
Add wait to amazon page set to avoid tab crashes
## Code After:
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(15)
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
|
...
self.archive_data_file = 'data/skia_amazon_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(15)
...
|
8dcf5b2c85430a09502649bb3bb95c7b56312c03
|
pysearch/urls.py
|
pysearch/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^search/', include('search.urls')),
)
|
Connect search route to app
|
Connect search route to app
|
Python
|
mit
|
nh0815/PySearch,nh0815/PySearch
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
+ url(r'^search/', include('search.urls')),
)
|
Connect search route to app
|
## Code Before:
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
## Instruction:
Connect search route to app
## Code After:
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^search/', include('search.urls')),
)
|
...
url(r'^admin/', include(admin.site.urls)),
url(r'^search/', include('search.urls')),
)
...
|
447f19638c43cf273b6922796a203d33407bc29e
|
test/util.py
|
test/util.py
|
'''Helper code for theanets unit tests.'''
import numpy as np
class MNIST(object):
NUM_DIGITS = 100
DIGIT_SIZE = 784
def setUp(self):
# we just create some random "mnist digit" data of the right shape.
np.random.seed(3)
self.images = np.random.randn(NUM_DIGITS, DIGIT_SIZE).astype('f')
self.labels = np.random.randint(0, 10, NUM_DIGITS).astype('i')
|
'''Helper code for theanets unit tests.'''
import numpy as np
class MNIST(object):
NUM_DIGITS = 100
DIGIT_SIZE = 784
def setUp(self):
# we just create some random "mnist digit" data of the right shape.
np.random.seed(3)
self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f')
self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i')
|
Use proper namespace for constants.
|
Use proper namespace for constants.
|
Python
|
mit
|
chrinide/theanets,devdoer/theanets,lmjohns3/theanets
|
'''Helper code for theanets unit tests.'''
import numpy as np
class MNIST(object):
NUM_DIGITS = 100
DIGIT_SIZE = 784
def setUp(self):
# we just create some random "mnist digit" data of the right shape.
np.random.seed(3)
- self.images = np.random.randn(NUM_DIGITS, DIGIT_SIZE).astype('f')
+ self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f')
- self.labels = np.random.randint(0, 10, NUM_DIGITS).astype('i')
+ self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i')
|
Use proper namespace for constants.
|
## Code Before:
'''Helper code for theanets unit tests.'''
import numpy as np
class MNIST(object):
NUM_DIGITS = 100
DIGIT_SIZE = 784
def setUp(self):
# we just create some random "mnist digit" data of the right shape.
np.random.seed(3)
self.images = np.random.randn(NUM_DIGITS, DIGIT_SIZE).astype('f')
self.labels = np.random.randint(0, 10, NUM_DIGITS).astype('i')
## Instruction:
Use proper namespace for constants.
## Code After:
'''Helper code for theanets unit tests.'''
import numpy as np
class MNIST(object):
NUM_DIGITS = 100
DIGIT_SIZE = 784
def setUp(self):
# we just create some random "mnist digit" data of the right shape.
np.random.seed(3)
self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f')
self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i')
|
# ... existing code ...
np.random.seed(3)
self.images = np.random.randn(MNIST.NUM_DIGITS, MNIST.DIGIT_SIZE).astype('f')
self.labels = np.random.randint(0, 10, MNIST.NUM_DIGITS).astype('i')
# ... rest of the code ...
|
cc1e5bc1ae3b91973d9fa30ab8e0f9cb3c147a9e
|
ddt.py
|
ddt.py
|
from functools import wraps
__version__ = '0.1.1'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
i = 0
for v in getattr(f, MAGIC):
setattr(cls,
"{0}_{1}".format(name, v),
feed_data(f, v))
i = i + 1
delattr(cls, name)
return cls
|
from functools import wraps
__version__ = '0.1.1'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
i = 0
for v in getattr(f, MAGIC):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
i = i + 1
delattr(cls, name)
return cls
|
Use __name__ from data object to set the test method name, if available.
|
Use __name__ from data object to set the test method name, if available.
Allows to provide user-friendly names for the user instead of
the default raw data formatting.
|
Python
|
mit
|
domidimi/ddt,edx/ddt,domidimi/ddt,datadriventests/ddt,edx/ddt,datadriventests/ddt
|
from functools import wraps
__version__ = '0.1.1'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
i = 0
for v in getattr(f, MAGIC):
+ test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
+ setattr(cls, test_name, feed_data(f, v))
- setattr(cls,
- "{0}_{1}".format(name, v),
- feed_data(f, v))
i = i + 1
delattr(cls, name)
return cls
|
Use __name__ from data object to set the test method name, if available.
|
## Code Before:
from functools import wraps
__version__ = '0.1.1'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
i = 0
for v in getattr(f, MAGIC):
setattr(cls,
"{0}_{1}".format(name, v),
feed_data(f, v))
i = i + 1
delattr(cls, name)
return cls
## Instruction:
Use __name__ from data object to set the test method name, if available.
## Code After:
from functools import wraps
__version__ = '0.1.1'
MAGIC = '%values' # this value cannot conflict with any real python attribute
def data(*values):
"""
Method decorator to add to your test methods.
Should be added to methods of instances of ``unittest.TestCase``.
"""
def wrapper(func):
setattr(func, MAGIC, values)
return func
return wrapper
def ddt(cls):
"""
Class decorator for subclasses of ``unittest.TestCase``.
Apply this decorator to the test case class, and then
decorate test methods with ``@data``.
For each method decorated with ``@data``, this will effectively create as
many methods as data items are passed as parameters to ``@data``.
"""
def feed_data(func, *args, **kwargs):
"""
This internal method decorator feeds the test data item to the test.
"""
@wraps(func)
def wrapper(self):
return func(self, *args, **kwargs)
return wrapper
for name, f in cls.__dict__.items():
if hasattr(f, MAGIC):
i = 0
for v in getattr(f, MAGIC):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
i = i + 1
delattr(cls, name)
return cls
|
...
for v in getattr(f, MAGIC):
test_name = getattr(v, "__name__", "{0}_{1}".format(name, v))
setattr(cls, test_name, feed_data(f, v))
i = i + 1
...
|
5e3be1d123063495f21d0c0068c7132d43fd9724
|
account/models.py
|
account/models.py
|
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from course.models import Term
class Profile(models.Model):
user = models.OneToOneField(User)
student_id = models.CharField(max_length=10, null=True)
default_term = models.ForeignKey(Term, null=True)
facebook_id = models.CharField(max_length=50, null=True)
def create_profile(sender, instance, created, **kwargs):
profile = Profile.objects.get(user=instance)
if not profile:
Profile(user=instance).save()
signals.post_save.connect(create_profile, sender=User)
|
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from course.models import Term
class Profile(models.Model):
user = models.OneToOneField(User)
student_id = models.CharField(max_length=10, null=True)
default_term = models.ForeignKey(Term, null=True)
facebook_id = models.CharField(max_length=50, null=True)
def create_profile(sender, instance, created, **kwargs):
try:
profile = Profile.objects.get(user=instance)
except Profile.DoesNotExist:
Profile(user=instance).save()
signals.post_save.connect(create_profile, sender=User)
|
Fix login error for new accounts where a profile doesn't exist
|
Fix login error for new accounts where a profile doesn't exist
|
Python
|
apache-2.0
|
OpenCourseProject/OpenCourse,gravitylow/OpenCourse,gravitylow/OpenCourse,gravitylow/OpenCourse,OpenCourseProject/OpenCourse,OpenCourseProject/OpenCourse
|
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from course.models import Term
class Profile(models.Model):
user = models.OneToOneField(User)
student_id = models.CharField(max_length=10, null=True)
default_term = models.ForeignKey(Term, null=True)
facebook_id = models.CharField(max_length=50, null=True)
def create_profile(sender, instance, created, **kwargs):
+ try:
- profile = Profile.objects.get(user=instance)
+ profile = Profile.objects.get(user=instance)
- if not profile:
+ except Profile.DoesNotExist:
Profile(user=instance).save()
signals.post_save.connect(create_profile, sender=User)
|
Fix login error for new accounts where a profile doesn't exist
|
## Code Before:
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from course.models import Term
class Profile(models.Model):
user = models.OneToOneField(User)
student_id = models.CharField(max_length=10, null=True)
default_term = models.ForeignKey(Term, null=True)
facebook_id = models.CharField(max_length=50, null=True)
def create_profile(sender, instance, created, **kwargs):
profile = Profile.objects.get(user=instance)
if not profile:
Profile(user=instance).save()
signals.post_save.connect(create_profile, sender=User)
## Instruction:
Fix login error for new accounts where a profile doesn't exist
## Code After:
from django.db import models
from django.db.models import signals
from django.contrib.auth.models import User
from course.models import Term
class Profile(models.Model):
user = models.OneToOneField(User)
student_id = models.CharField(max_length=10, null=True)
default_term = models.ForeignKey(Term, null=True)
facebook_id = models.CharField(max_length=50, null=True)
def create_profile(sender, instance, created, **kwargs):
try:
profile = Profile.objects.get(user=instance)
except Profile.DoesNotExist:
Profile(user=instance).save()
signals.post_save.connect(create_profile, sender=User)
|
...
def create_profile(sender, instance, created, **kwargs):
try:
profile = Profile.objects.get(user=instance)
except Profile.DoesNotExist:
Profile(user=instance).save()
...
|
356c9cd23ebf4953af169f38126fd521b49ca6c4
|
recipe_scrapers/_abstract.py
|
recipe_scrapers/_abstract.py
|
from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
|
from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
|
Add docstring to the methods structure in the abstract class
|
Add docstring to the methods structure in the abstract class
|
Python
|
mit
|
hhursev/recipe-scraper
|
from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
+ """ get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
+ """ the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
+ """ title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
+ """ total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
+ """ list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
+ """ directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
+ """ social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
|
Add docstring to the methods structure in the abstract class
|
## Code Before:
from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
## Instruction:
Add docstring to the methods structure in the abstract class
## Code After:
from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
|
# ... existing code ...
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
# ... modified code ...
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
...
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
...
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
...
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
...
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
...
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
# ... rest of the code ...
|
1a271575d92a6d7df1bc7dedf346b29a778f2261
|
update.py
|
update.py
|
"""DJRivals database updater."""
from random import shuffle
from time import localtime, sleep, strftime, time
import pop
import dj
import image
import html
def continuous():
"""continuous() -> None
Continuous incremental updates of the DJRivals database.
"""
while(True):
print("Beginning new cycle...\n")
disc_list = list(pop.index().keys())
interval = int(24 * 60 * 60 / len(disc_list))
shuffle(disc_list)
for disc in disc_list:
pop.database([disc])
print("\nNext incremental update at: " + strftime("%H:%M:%S", localtime(time() + interval)))
print("Ctrl-C to quit.\n")
sleep(interval)
dj.database()
image.icons()
html.html()
print("Full database update complete.\n")
def index():
"""index() -> None
Update the index file and retrieve new disc images if available.
"""
pop.index(True)
image.discs()
|
"""DJRivals database updater."""
from collections import OrderedDict
from time import localtime, sleep, strftime, time
import json
from common import _link
import pop
import dj
import image
import html
def continuous():
"""continuous() -> None
Continuous incremental updates of the DJRivals database.
"""
index_file = _link("index_file")
while(True):
print("Beginning new cycle...\n")
disc_list = pop.index()
disc_list = sorted(disc_list.keys(), key=lambda x: disc_list[x]["timestamp"])
interval = int(24 * 60 * 60 / len(disc_list))
for disc in disc_list:
pop.database([disc])
with open(index_file, "rb") as f:
data = json.loads(f.read().decode(), object_pairs_hook=OrderedDict)
data[disc]["timestamp"] = int(time())
with open(index_file, "wb") as f:
f.write(json.dumps(data, indent=4).encode())
print("\nNext incremental update at: " + strftime("%H:%M:%S", localtime(time() + interval)))
print("Ctrl-C to quit.\n")
sleep(interval)
dj.database()
image.icons()
html.html()
print("Full database update complete.\n")
def index():
"""index() -> None
Update the index file and retrieve new disc images if available.
"""
pop.index(True)
image.discs()
|
Sort the disc list by timestamp.
|
Sort the disc list by timestamp.
|
Python
|
bsd-2-clause
|
chingc/DJRivals,chingc/DJRivals
|
"""DJRivals database updater."""
- from random import shuffle
+ from collections import OrderedDict
from time import localtime, sleep, strftime, time
+ import json
+ from common import _link
import pop
import dj
import image
import html
def continuous():
"""continuous() -> None
Continuous incremental updates of the DJRivals database.
"""
+ index_file = _link("index_file")
while(True):
print("Beginning new cycle...\n")
- disc_list = list(pop.index().keys())
+ disc_list = pop.index()
+ disc_list = sorted(disc_list.keys(), key=lambda x: disc_list[x]["timestamp"])
interval = int(24 * 60 * 60 / len(disc_list))
- shuffle(disc_list)
for disc in disc_list:
pop.database([disc])
+ with open(index_file, "rb") as f:
+ data = json.loads(f.read().decode(), object_pairs_hook=OrderedDict)
+ data[disc]["timestamp"] = int(time())
+ with open(index_file, "wb") as f:
+ f.write(json.dumps(data, indent=4).encode())
print("\nNext incremental update at: " + strftime("%H:%M:%S", localtime(time() + interval)))
print("Ctrl-C to quit.\n")
sleep(interval)
dj.database()
image.icons()
html.html()
print("Full database update complete.\n")
def index():
"""index() -> None
Update the index file and retrieve new disc images if available.
"""
pop.index(True)
image.discs()
|
Sort the disc list by timestamp.
|
## Code Before:
"""DJRivals database updater."""
from random import shuffle
from time import localtime, sleep, strftime, time
import pop
import dj
import image
import html
def continuous():
"""continuous() -> None
Continuous incremental updates of the DJRivals database.
"""
while(True):
print("Beginning new cycle...\n")
disc_list = list(pop.index().keys())
interval = int(24 * 60 * 60 / len(disc_list))
shuffle(disc_list)
for disc in disc_list:
pop.database([disc])
print("\nNext incremental update at: " + strftime("%H:%M:%S", localtime(time() + interval)))
print("Ctrl-C to quit.\n")
sleep(interval)
dj.database()
image.icons()
html.html()
print("Full database update complete.\n")
def index():
"""index() -> None
Update the index file and retrieve new disc images if available.
"""
pop.index(True)
image.discs()
## Instruction:
Sort the disc list by timestamp.
## Code After:
"""DJRivals database updater."""
from collections import OrderedDict
from time import localtime, sleep, strftime, time
import json
from common import _link
import pop
import dj
import image
import html
def continuous():
"""continuous() -> None
Continuous incremental updates of the DJRivals database.
"""
index_file = _link("index_file")
while(True):
print("Beginning new cycle...\n")
disc_list = pop.index()
disc_list = sorted(disc_list.keys(), key=lambda x: disc_list[x]["timestamp"])
interval = int(24 * 60 * 60 / len(disc_list))
for disc in disc_list:
pop.database([disc])
with open(index_file, "rb") as f:
data = json.loads(f.read().decode(), object_pairs_hook=OrderedDict)
data[disc]["timestamp"] = int(time())
with open(index_file, "wb") as f:
f.write(json.dumps(data, indent=4).encode())
print("\nNext incremental update at: " + strftime("%H:%M:%S", localtime(time() + interval)))
print("Ctrl-C to quit.\n")
sleep(interval)
dj.database()
image.icons()
html.html()
print("Full database update complete.\n")
def index():
"""index() -> None
Update the index file and retrieve new disc images if available.
"""
pop.index(True)
image.discs()
|
# ... existing code ...
"""DJRivals database updater."""
from collections import OrderedDict
from time import localtime, sleep, strftime, time
import json
from common import _link
import pop
# ... modified code ...
"""
index_file = _link("index_file")
while(True):
...
print("Beginning new cycle...\n")
disc_list = pop.index()
disc_list = sorted(disc_list.keys(), key=lambda x: disc_list[x]["timestamp"])
interval = int(24 * 60 * 60 / len(disc_list))
for disc in disc_list:
...
pop.database([disc])
with open(index_file, "rb") as f:
data = json.loads(f.read().decode(), object_pairs_hook=OrderedDict)
data[disc]["timestamp"] = int(time())
with open(index_file, "wb") as f:
f.write(json.dumps(data, indent=4).encode())
print("\nNext incremental update at: " + strftime("%H:%M:%S", localtime(time() + interval)))
# ... rest of the code ...
|
54d67ce544e95ecb58a62062ffe50fcd95db6f09
|
sso/apps.py
|
sso/apps.py
|
from django.apps import AppConfig
class SsoConfig(AppConfig):
name = 'sso'
github_client_id = '844189c44c56ff04e727'
github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc'
|
import json
import os.path
from django.apps import AppConfig
from fmproject import settings
class SsoConfig(AppConfig):
base_config = json.load(
open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json'))
)
name = 'sso'
github_client_id = base_config['github']['client_id']
github_client_secret = base_config['github']['client_secret']
|
Load github config from external file
|
Load github config from external file
|
Python
|
mit
|
favoritemedium/sso-prototype,favoritemedium/sso-prototype
|
+ import json
+ import os.path
from django.apps import AppConfig
+ from fmproject import settings
class SsoConfig(AppConfig):
+ base_config = json.load(
+ open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json'))
+ )
name = 'sso'
- github_client_id = '844189c44c56ff04e727'
- github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc'
+ github_client_id = base_config['github']['client_id']
+ github_client_secret = base_config['github']['client_secret']
+
|
Load github config from external file
|
## Code Before:
from django.apps import AppConfig
class SsoConfig(AppConfig):
name = 'sso'
github_client_id = '844189c44c56ff04e727'
github_client_secret = '0bfecee7a78ee0e800b6bff85b08c140b91be4cc'
## Instruction:
Load github config from external file
## Code After:
import json
import os.path
from django.apps import AppConfig
from fmproject import settings
class SsoConfig(AppConfig):
base_config = json.load(
open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json'))
)
name = 'sso'
github_client_id = base_config['github']['client_id']
github_client_secret = base_config['github']['client_secret']
|
...
import json
import os.path
from django.apps import AppConfig
from fmproject import settings
...
class SsoConfig(AppConfig):
base_config = json.load(
open(os.path.join(settings.BASE_DIR, 'fmproject', 'config.json'))
)
name = 'sso'
github_client_id = base_config['github']['client_id']
github_client_secret = base_config['github']['client_secret']
...
|
dcc2821cac0619fc2ca5f486ad30416f3c3cfda9
|
ce/expr/parser.py
|
ce/expr/parser.py
|
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
def _parse_r(s):
s = s.strip()
bracket_level = 0
operator_pos = -1
for i, v in enumerate(s):
if v == '(':
bracket_level += 1
if v == ')':
bracket_level -= 1
if bracket_level == 1 and v in OPERATORS:
operator_pos = i
break
if operator_pos == -1:
return s
a1 = _parse_r(s[1:operator_pos].strip())
a2 = _parse_r(s[operator_pos + 1:-1].strip())
return Expr(s[operator_pos], a1, a2)
|
import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
OPERATOR_MAP = {
ast.Add: ADD_OP,
ast.Mult: MULTIPLY_OP,
}
def parse(s):
from .biop import Expr
def _parse_r(t):
try:
return t.n
except AttributeError:
pass
try:
return t.id
except AttributeError:
op = OPERATOR_MAP[t.op.__class__]
a1 = _parse_r(t.left)
a2 = _parse_r(t.right)
return Expr(op, a1, a2)
return _parse_r(ast.parse(s, mode='eval').body)
|
Replace parsing with Python's ast
|
Replace parsing with Python's ast
Allows greater flexibility and syntax checks
|
Python
|
mit
|
admk/soap
|
+
+ import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
+ OPERATOR_MAP = {
+ ast.Add: ADD_OP,
+ ast.Mult: MULTIPLY_OP,
+ }
- def _parse_r(s):
- s = s.strip()
- bracket_level = 0
- operator_pos = -1
- for i, v in enumerate(s):
- if v == '(':
- bracket_level += 1
- if v == ')':
- bracket_level -= 1
- if bracket_level == 1 and v in OPERATORS:
- operator_pos = i
- break
- if operator_pos == -1:
- return s
- a1 = _parse_r(s[1:operator_pos].strip())
- a2 = _parse_r(s[operator_pos + 1:-1].strip())
- return Expr(s[operator_pos], a1, a2)
+
+ def parse(s):
+ from .biop import Expr
+ def _parse_r(t):
+ try:
+ return t.n
+ except AttributeError:
+ pass
+ try:
+ return t.id
+ except AttributeError:
+ op = OPERATOR_MAP[t.op.__class__]
+ a1 = _parse_r(t.left)
+ a2 = _parse_r(t.right)
+ return Expr(op, a1, a2)
+ return _parse_r(ast.parse(s, mode='eval').body)
+
|
Replace parsing with Python's ast
|
## Code Before:
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
def _parse_r(s):
s = s.strip()
bracket_level = 0
operator_pos = -1
for i, v in enumerate(s):
if v == '(':
bracket_level += 1
if v == ')':
bracket_level -= 1
if bracket_level == 1 and v in OPERATORS:
operator_pos = i
break
if operator_pos == -1:
return s
a1 = _parse_r(s[1:operator_pos].strip())
a2 = _parse_r(s[operator_pos + 1:-1].strip())
return Expr(s[operator_pos], a1, a2)
## Instruction:
Replace parsing with Python's ast
## Code After:
import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
OPERATOR_MAP = {
ast.Add: ADD_OP,
ast.Mult: MULTIPLY_OP,
}
def parse(s):
from .biop import Expr
def _parse_r(t):
try:
return t.n
except AttributeError:
pass
try:
return t.id
except AttributeError:
op = OPERATOR_MAP[t.op.__class__]
a1 = _parse_r(t.left)
a2 = _parse_r(t.right)
return Expr(op, a1, a2)
return _parse_r(ast.parse(s, mode='eval').body)
|
// ... existing code ...
import ast
// ... modified code ...
OPERATOR_MAP = {
ast.Add: ADD_OP,
ast.Mult: MULTIPLY_OP,
}
def parse(s):
from .biop import Expr
def _parse_r(t):
try:
return t.n
except AttributeError:
pass
try:
return t.id
except AttributeError:
op = OPERATOR_MAP[t.op.__class__]
a1 = _parse_r(t.left)
a2 = _parse_r(t.right)
return Expr(op, a1, a2)
return _parse_r(ast.parse(s, mode='eval').body)
// ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.