commit: stringlengths (40 to 40)
old_file: stringlengths (4 to 106)
new_file: stringlengths (4 to 106)
old_contents: stringlengths (10 to 2.94k)
new_contents: stringlengths (21 to 2.95k)
subject: stringlengths (16 to 444)
message: stringlengths (17 to 2.63k)
lang: stringclasses (1 value)
license: stringclasses (13 values)
repos: stringlengths (7 to 43k)
ndiff: stringlengths (52 to 3.31k)
instruction: stringlengths (16 to 444)
content: stringlengths (133 to 4.32k)
diff: stringlengths (49 to 3.61k)
commit: ef2618e25cc6dfed119da8d0d4d3c26f2832a33b
old_file: ds/utils/logbuffer.py
new_file: ds/utils/logbuffer.py
old_contents:
from __future__ import absolute_import

from tempfile import NamedTemporaryFile


class LogBuffer(object):
    def __init__(self, chunk_size=4096):
        self.chunk_size = chunk_size
        self.fp = NamedTemporaryFile()

    def fileno(self):
        return self.fp.fileno()

    def write(self, chunk):
        self.fp.write(chunk)
        self.fp.flush()

    def close(self, force=False):
        if force:
            self.fp.close()

    def flush(self):
        self.fp.flush()

    def iter_chunks(self):
        self.flush()

        chunk_size = self.chunk_size
        result = ''
        offset = 0
        with open(self.fp.name) as fp:
            for chunk in fp:
                result += chunk
                while len(result) >= chunk_size:
                    newline_pos = result.rfind('\n', 0, chunk_size)
                    if newline_pos == -1:
                        newline_pos = chunk_size
                    else:
                        newline_pos += 1
                    yield (offset, result[:newline_pos])
                    offset += newline_pos
                    result = result[newline_pos:]
        if result:
            yield(offset, result)
        self.close(True)

new_contents:
from __future__ import absolute_import

from tempfile import NamedTemporaryFile


class LogBuffer(object):
    def __init__(self, chunk_size=4096):
        self.chunk_size = chunk_size
        self.fp = NamedTemporaryFile()

    def fileno(self):
        return self.fp.fileno()

    def write(self, chunk):
        self.fp.write(chunk)
        self.fp.flush()

    def close(self, force=False):
        self.fp.flush()
        if force:
            self.fp.close()

    def flush(self):
        self.fp.flush()

    def iter_chunks(self):
        chunk_size = self.chunk_size
        result = ''
        offset = 0
        with open(self.fp.name) as fp:
            for chunk in fp:
                result += chunk
                while len(result) >= chunk_size:
                    newline_pos = result.rfind('\n', 0, chunk_size)
                    if newline_pos == -1:
                        newline_pos = chunk_size
                    else:
                        newline_pos += 1
                    yield (offset, result[:newline_pos])
                    offset += newline_pos
                    result = result[newline_pos:]
        if result:
            yield(offset, result)
        self.close(True)

subject: Move flush logic into close
message: Move flush logic into close
lang: Python
license: apache-2.0
repos: jkimbo/freight,jkimbo/freight,getsentry/freight,klynton/freight,jkimbo/freight,getsentry/freight,klynton/freight,klynton/freight,rshk/freight,jkimbo/freight,getsentry/freight,klynton/freight,getsentry/freight,getsentry/freight,rshk/freight,rshk/freight,rshk/freight
ndiff:
  from __future__ import absolute_import

  from tempfile import NamedTemporaryFile


  class LogBuffer(object):
      def __init__(self, chunk_size=4096):
          self.chunk_size = chunk_size
          self.fp = NamedTemporaryFile()

      def fileno(self):
          return self.fp.fileno()

      def write(self, chunk):
          self.fp.write(chunk)
          self.fp.flush()

      def close(self, force=False):
+         self.fp.flush()
          if force:
              self.fp.close()

      def flush(self):
          self.fp.flush()

      def iter_chunks(self):
-         self.flush()
-
          chunk_size = self.chunk_size
          result = ''
          offset = 0
          with open(self.fp.name) as fp:
              for chunk in fp:
                  result += chunk
                  while len(result) >= chunk_size:
                      newline_pos = result.rfind('\n', 0, chunk_size)
                      if newline_pos == -1:
                          newline_pos = chunk_size
                      else:
                          newline_pos += 1
                      yield (offset, result[:newline_pos])
                      offset += newline_pos
                      result = result[newline_pos:]
          if result:
              yield(offset, result)
          self.close(True)

instruction: Move flush logic into close
content:
## Code Before:
from __future__ import absolute_import

from tempfile import NamedTemporaryFile


class LogBuffer(object):
    def __init__(self, chunk_size=4096):
        self.chunk_size = chunk_size
        self.fp = NamedTemporaryFile()

    def fileno(self):
        return self.fp.fileno()

    def write(self, chunk):
        self.fp.write(chunk)
        self.fp.flush()

    def close(self, force=False):
        if force:
            self.fp.close()

    def flush(self):
        self.fp.flush()

    def iter_chunks(self):
        self.flush()

        chunk_size = self.chunk_size
        result = ''
        offset = 0
        with open(self.fp.name) as fp:
            for chunk in fp:
                result += chunk
                while len(result) >= chunk_size:
                    newline_pos = result.rfind('\n', 0, chunk_size)
                    if newline_pos == -1:
                        newline_pos = chunk_size
                    else:
                        newline_pos += 1
                    yield (offset, result[:newline_pos])
                    offset += newline_pos
                    result = result[newline_pos:]
        if result:
            yield(offset, result)
        self.close(True)
## Instruction:
Move flush logic into close
## Code After:
from __future__ import absolute_import

from tempfile import NamedTemporaryFile


class LogBuffer(object):
    def __init__(self, chunk_size=4096):
        self.chunk_size = chunk_size
        self.fp = NamedTemporaryFile()

    def fileno(self):
        return self.fp.fileno()

    def write(self, chunk):
        self.fp.write(chunk)
        self.fp.flush()

    def close(self, force=False):
        self.fp.flush()
        if force:
            self.fp.close()

    def flush(self):
        self.fp.flush()

    def iter_chunks(self):
        chunk_size = self.chunk_size
        result = ''
        offset = 0
        with open(self.fp.name) as fp:
            for chunk in fp:
                result += chunk
                while len(result) >= chunk_size:
                    newline_pos = result.rfind('\n', 0, chunk_size)
                    if newline_pos == -1:
                        newline_pos = chunk_size
                    else:
                        newline_pos += 1
                    yield (offset, result[:newline_pos])
                    offset += newline_pos
                    result = result[newline_pos:]
        if result:
            yield(offset, result)
        self.close(True)

diff:
  from __future__ import absolute_import

  from tempfile import NamedTemporaryFile


  class LogBuffer(object):
      def __init__(self, chunk_size=4096):
          self.chunk_size = chunk_size
          self.fp = NamedTemporaryFile()

      def fileno(self):
          return self.fp.fileno()

      def write(self, chunk):
          self.fp.write(chunk)
          self.fp.flush()

      def close(self, force=False):
+         self.fp.flush()
          if force:
              self.fp.close()

      def flush(self):
          self.fp.flush()

      def iter_chunks(self):
-         self.flush()
-
          chunk_size = self.chunk_size
          result = ''
          offset = 0
          with open(self.fp.name) as fp:
              for chunk in fp:
                  result += chunk
                  while len(result) >= chunk_size:
                      newline_pos = result.rfind('\n', 0, chunk_size)
                      if newline_pos == -1:
                          newline_pos = chunk_size
                      else:
                          newline_pos += 1
                      yield (offset, result[:newline_pos])
                      offset += newline_pos
                      result = result[newline_pos:]
          if result:
              yield(offset, result)
          self.close(True)

commit: c2a1ce0ad4e2f2e9ff5ec72b89eb98967e445ea5
old_file: labsys/utils/custom_fields.py
new_file: labsys/utils/custom_fields.py
old_contents:
from wtforms.fields import RadioField


class NullBooleanField(RadioField):
    DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, value == self.data)

    def process_data(self, value):
        if isinstance(value, bool) is False and value is not None:
            self.data = None
        else:
            self.data = value

    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = valuelist[0]
            except ValueError:
                raise ValueError(self.gettext('Invalid Choice: could not coerce'))

    def pre_validate(self, form):
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))

new_contents:
from wtforms.fields import RadioField



class NullBooleanField(RadioField):
    DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
    TRUE_VALUES = ('True', 'true')
    FALSE_VALUES = ('False', 'false')
    NONE_VALUES = ('None', 'none', 'null', '')

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, value == self.data)

    def process_data(self, value):
        if value not in (True, False):
            self.data = None
        else:
            self.data = value

    def _parse_str_to_null_bool(self, input_str):
        if input_str in self.TRUE_VALUES:
            return True
        if input_str in self.FALSE_VALUES:
            return False
        if input_str in self.NONE_VALUES:
            return None
        raise ValueError

    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = self._parse_str_to_null_bool(valuelist[0])
            except ValueError:
                raise ValueError(self.gettext(
                    'Invalid Choice: could not coerce'))

    def pre_validate(self, form):
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))

subject: Improve NullBooleanField with Truthy/Falsy values
message: :art: Improve NullBooleanField with Truthy/Falsy values
lang: Python
license: mit
repos: gems-uff/labsys,gems-uff/labsys,gems-uff/labsys
ndiff:
  from wtforms.fields import RadioField

+
  class NullBooleanField(RadioField):
      DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
+     TRUE_VALUES = ('True', 'true')
+     FALSE_VALUES = ('False', 'false')
+     NONE_VALUES = ('None', 'none', 'null', '')
+
      def __init__(self, **kwargs):
          super().__init__(**kwargs)
          self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

      def iter_choices(self):
          for value, label in self.choices:
              yield (value, label, value == self.data)

      def process_data(self, value):
-         if isinstance(value, bool) is False and value is not None:
+         if value not in (True, False):
              self.data = None
          else:
              self.data = value

+     def _parse_str_to_null_bool(self, input_str):
+         if input_str in self.TRUE_VALUES:
+             return True
+         if input_str in self.FALSE_VALUES:
+             return False
+         if input_str in self.NONE_VALUES:
+             return None
+         raise ValueError
+
      def process_formdata(self, valuelist):
          if valuelist:
              try:
-                 self.data = valuelist[0]
+                 self.data = self._parse_str_to_null_bool(valuelist[0])
              except ValueError:
+                 raise ValueError(self.gettext(
-                 raise ValueError(self.gettext('Invalid Choice: could not coerce'))
+                     'Invalid Choice: could not coerce'))

      def pre_validate(self, form):
          for value, _ in self.choices:
              if self.data == value:
                  break
          else:
              raise ValueError(self.gettext('Not a valid choice'))

instruction: Improve NullBooleanField with Truthy/Falsy values
content:
## Code Before:
from wtforms.fields import RadioField


class NullBooleanField(RadioField):
    DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, value == self.data)

    def process_data(self, value):
        if isinstance(value, bool) is False and value is not None:
            self.data = None
        else:
            self.data = value

    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = valuelist[0]
            except ValueError:
                raise ValueError(self.gettext('Invalid Choice: could not coerce'))

    def pre_validate(self, form):
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))
## Instruction:
Improve NullBooleanField with Truthy/Falsy values
## Code After:
from wtforms.fields import RadioField



class NullBooleanField(RadioField):
    DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
    TRUE_VALUES = ('True', 'true')
    FALSE_VALUES = ('False', 'false')
    NONE_VALUES = ('None', 'none', 'null', '')

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, value == self.data)

    def process_data(self, value):
        if value not in (True, False):
            self.data = None
        else:
            self.data = value

    def _parse_str_to_null_bool(self, input_str):
        if input_str in self.TRUE_VALUES:
            return True
        if input_str in self.FALSE_VALUES:
            return False
        if input_str in self.NONE_VALUES:
            return None
        raise ValueError

    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = self._parse_str_to_null_bool(valuelist[0])
            except ValueError:
                raise ValueError(self.gettext(
                    'Invalid Choice: could not coerce'))

    def pre_validate(self, form):
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))

diff:
  from wtforms.fields import RadioField

+
  class NullBooleanField(RadioField):
      DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
+     TRUE_VALUES = ('True', 'true')
+     FALSE_VALUES = ('False', 'false')
+     NONE_VALUES = ('None', 'none', 'null', '')
+
      def __init__(self, **kwargs):
          super().__init__(**kwargs)
          self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

      def iter_choices(self):
          for value, label in self.choices:
              yield (value, label, value == self.data)

      def process_data(self, value):
-         if isinstance(value, bool) is False and value is not None:
+         if value not in (True, False):
              self.data = None
          else:
              self.data = value

+     def _parse_str_to_null_bool(self, input_str):
+         if input_str in self.TRUE_VALUES:
+             return True
+         if input_str in self.FALSE_VALUES:
+             return False
+         if input_str in self.NONE_VALUES:
+             return None
+         raise ValueError
+
      def process_formdata(self, valuelist):
          if valuelist:
              try:
-                 self.data = valuelist[0]
+                 self.data = self._parse_str_to_null_bool(valuelist[0])
              except ValueError:
+                 raise ValueError(self.gettext(
-                 raise ValueError(self.gettext('Invalid Choice: could not coerce'))
?                 ----- ^^^^^^^^^^^^^^^^^^^^^^^^
+                     'Invalid Choice: could not coerce'))
?                  ^^^

      def pre_validate(self, form):
          for value, _ in self.choices:
              if self.data == value:
                  break
          else:
              raise ValueError(self.gettext('Not a valid choice'))

commit: c135e9ac8fead8e9e58d2f34e5aa66354bd1b996
old_file: tests/test_route_requester.py
new_file: tests/test_route_requester.py
old_contents:
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError

requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")

class TestOptionalParameters(unittest.TestCase):
    def test_invalid_mode(self):
        """
        Tests the is_valid_mode function for an invalid input
        """
        with self.assertRaises(InvalidModeError):
            requester.set_mode("flying")

    def test_invalid_alternative(self):
        """
        Tests for error handling when an invalid value is provided to the set_alternative function
        """
        with self.assertRaises(InvalidAlternativeError):
            requester.set_alternatives('False')

    def test_invalid_restrictions(self):
        """
        Tests for invalid route restrictions
        """
        with self.assertRaises(ValueError):
            requester.set_route_restrictions("freeways", "railways")

class TestAPIKey(unittest.TestCase):

    def test_invalid_api_key(self):
        invalid_key = 123456
        with self.assertRaises(InvalidAPIKeyError):
            requester.set_api_key(invalid_key)

if __name__ == '__main__':
    unittest.main()

new_contents:
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
import os

MAPS_API_KEY = os.environ['MAPS_API_KEY']

class TestOptionalParameters(unittest.TestCase):
    def test_invalid_mode(self):
        """
        Tests the is_valid_mode function for an invalid input
        """
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        with self.assertRaises(InvalidModeError):
            requester.set_mode("flying")

    def test_invalid_alternative(self):
        """
        Tests for error handling when an invalid value is provided to the set_alternative function
        """
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        with self.assertRaises(InvalidAlternativeError):
            requester.set_alternatives('False')

    def test_invalid_restrictions(self):
        """
        Tests for invalid route restrictions
        """
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        with self.assertRaises(ValueError):
            requester.set_route_restrictions("freeways", "railways")

class TestAPIKey(unittest.TestCase):
    def test_invalid_api_key(self):
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        invalid_key = 123456
        with self.assertRaises(InvalidAPIKeyError):
            requester.set_api_key(invalid_key)

if __name__ == '__main__':
    unittest.main()

subject: Refactor tests to include API KEY
message: Refactor tests to include API KEY
lang: Python
license: apache-2.0
repos: apranav19/pydirections
ndiff:
  import unittest
  from pydirections.route_requester import DirectionsRequest
  from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
+ import os

- requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
+ MAPS_API_KEY = os.environ['MAPS_API_KEY']

  class TestOptionalParameters(unittest.TestCase):
      def test_invalid_mode(self):
          """
          Tests the is_valid_mode function for an invalid input
          """
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          with self.assertRaises(InvalidModeError):
              requester.set_mode("flying")

      def test_invalid_alternative(self):
          """
          Tests for error handling when an invalid value is provided to the set_alternative function
          """
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          with self.assertRaises(InvalidAlternativeError):
              requester.set_alternatives('False')

      def test_invalid_restrictions(self):
          """
          Tests for invalid route restrictions
          """
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          with self.assertRaises(ValueError):
              requester.set_route_restrictions("freeways", "railways")

  class TestAPIKey(unittest.TestCase):
-
      def test_invalid_api_key(self):
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          invalid_key = 123456
          with self.assertRaises(InvalidAPIKeyError):
              requester.set_api_key(invalid_key)

  if __name__ == '__main__':
      unittest.main()

instruction: Refactor tests to include API KEY
content:
## Code Before:
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError

requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")

class TestOptionalParameters(unittest.TestCase):
    def test_invalid_mode(self):
        """
        Tests the is_valid_mode function for an invalid input
        """
        with self.assertRaises(InvalidModeError):
            requester.set_mode("flying")

    def test_invalid_alternative(self):
        """
        Tests for error handling when an invalid value is provided to the set_alternative function
        """
        with self.assertRaises(InvalidAlternativeError):
            requester.set_alternatives('False')

    def test_invalid_restrictions(self):
        """
        Tests for invalid route restrictions
        """
        with self.assertRaises(ValueError):
            requester.set_route_restrictions("freeways", "railways")

class TestAPIKey(unittest.TestCase):

    def test_invalid_api_key(self):
        invalid_key = 123456
        with self.assertRaises(InvalidAPIKeyError):
            requester.set_api_key(invalid_key)

if __name__ == '__main__':
    unittest.main()
## Instruction:
Refactor tests to include API KEY
## Code After:
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
import os

MAPS_API_KEY = os.environ['MAPS_API_KEY']

class TestOptionalParameters(unittest.TestCase):
    def test_invalid_mode(self):
        """
        Tests the is_valid_mode function for an invalid input
        """
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        with self.assertRaises(InvalidModeError):
            requester.set_mode("flying")

    def test_invalid_alternative(self):
        """
        Tests for error handling when an invalid value is provided to the set_alternative function
        """
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        with self.assertRaises(InvalidAlternativeError):
            requester.set_alternatives('False')

    def test_invalid_restrictions(self):
        """
        Tests for invalid route restrictions
        """
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        with self.assertRaises(ValueError):
            requester.set_route_restrictions("freeways", "railways")

class TestAPIKey(unittest.TestCase):
    def test_invalid_api_key(self):
        requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
        invalid_key = 123456
        with self.assertRaises(InvalidAPIKeyError):
            requester.set_api_key(invalid_key)

if __name__ == '__main__':
    unittest.main()

diff:
  import unittest
  from pydirections.route_requester import DirectionsRequest
  from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
+ import os

- requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
+ MAPS_API_KEY = os.environ['MAPS_API_KEY']

  class TestOptionalParameters(unittest.TestCase):
      def test_invalid_mode(self):
          """
          Tests the is_valid_mode function for an invalid input
          """
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          with self.assertRaises(InvalidModeError):
              requester.set_mode("flying")

      def test_invalid_alternative(self):
          """
          Tests for error handling when an invalid value is provided to the set_alternative function
          """
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          with self.assertRaises(InvalidAlternativeError):
              requester.set_alternatives('False')

      def test_invalid_restrictions(self):
          """
          Tests for invalid route restrictions
          """
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          with self.assertRaises(ValueError):
              requester.set_route_restrictions("freeways", "railways")

  class TestAPIKey(unittest.TestCase):
-
      def test_invalid_api_key(self):
+         requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA", key=MAPS_API_KEY)
          invalid_key = 123456
          with self.assertRaises(InvalidAPIKeyError):
              requester.set_api_key(invalid_key)

  if __name__ == '__main__':
      unittest.main()

commit: b7f3ee836cb73d274bfd7dc415bb43e2fa743e12
old_file: httpserverhandler.py
new_file: httpserverhandler.py
old_contents:
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os

class HttpServerHandler(BaseHTTPRequestHandler):
    allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']

    def has_permission_to_reply(self, file_path):
        file_name, file_extension = os.path.splitext(file_path)
        send_reply = file_extension in self.allowed_extensions
        mimetype = mimetypes.guess_type(file_name + file_extension)
        return mimetype, send_reply

    def do_GET(self):
        file_path = self.path
        if file_path == "/":
            file_path = "/index.html"
        try:
            mimetype, send_reply = self.has_permission_to_reply(file_path)
            if True:
                full_path = curdir + sep + "pages" + sep + file_path
                f = open(full_path)
                self.send_response(200)
                self.send_header('Content-type', mimetype)
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
            return
        except IOError:
            self.send_error(404, 'File Not Found: %s' % file_path)

new_contents:
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os

class HttpServerHandler(BaseHTTPRequestHandler):
    allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']

    def has_permission_to_reply(self, file_path):
        file_name, file_extension = os.path.splitext(file_path)
        send_reply = file_extension in self.allowed_extensions
        mimetype = mimetypes.guess_type(file_name + file_extension)
        return mimetype, send_reply

    def do_GET(self):
        file_path = self.path
        if file_path == "/":
            file_path = "/index.html"
        try:
            mimetype, send_reply = self.has_permission_to_reply(file_path)
            if send_reply:
                full_path = curdir + sep + "pages" + sep + file_path
                f = open(full_path)
                self.send_response(200)
                self.send_header('Content-type', mimetype)
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
            return
        except IOError:
            self.send_error(404, 'File Not Found: %s' % file_path)

subject: Add .ico to the allowed extension list.
message: Add .ico to the allowed extension list.
lang: Python
license: apache-2.0
repos: gearlles/planb-client,gearlles/planb-client,gearlles/planb-client
ndiff:
  from BaseHTTPServer import BaseHTTPRequestHandler
  import mimetypes
  from os import curdir, sep
  import os

  class HttpServerHandler(BaseHTTPRequestHandler):
-     allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
+     allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']

      def has_permission_to_reply(self, file_path):
          file_name, file_extension = os.path.splitext(file_path)
          send_reply = file_extension in self.allowed_extensions
          mimetype = mimetypes.guess_type(file_name + file_extension)
          return mimetype, send_reply

      def do_GET(self):
          file_path = self.path
          if file_path == "/":
              file_path = "/index.html"
          try:
              mimetype, send_reply = self.has_permission_to_reply(file_path)
-             if True:
+             if send_reply:
                  full_path = curdir + sep + "pages" + sep + file_path
                  f = open(full_path)
                  self.send_response(200)
                  self.send_header('Content-type', mimetype)
                  self.end_headers()
                  self.wfile.write(f.read())
                  f.close()
              return
          except IOError:
              self.send_error(404, 'File Not Found: %s' % file_path)

instruction: Add .ico to the allowed extension list.
content:
## Code Before:
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os

class HttpServerHandler(BaseHTTPRequestHandler):
    allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']

    def has_permission_to_reply(self, file_path):
        file_name, file_extension = os.path.splitext(file_path)
        send_reply = file_extension in self.allowed_extensions
        mimetype = mimetypes.guess_type(file_name + file_extension)
        return mimetype, send_reply

    def do_GET(self):
        file_path = self.path
        if file_path == "/":
            file_path = "/index.html"
        try:
            mimetype, send_reply = self.has_permission_to_reply(file_path)
            if True:
                full_path = curdir + sep + "pages" + sep + file_path
                f = open(full_path)
                self.send_response(200)
                self.send_header('Content-type', mimetype)
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
            return
        except IOError:
            self.send_error(404, 'File Not Found: %s' % file_path)
## Instruction:
Add .ico to the allowed extension list.
## Code After:
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os

class HttpServerHandler(BaseHTTPRequestHandler):
    allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']

    def has_permission_to_reply(self, file_path):
        file_name, file_extension = os.path.splitext(file_path)
        send_reply = file_extension in self.allowed_extensions
        mimetype = mimetypes.guess_type(file_name + file_extension)
        return mimetype, send_reply

    def do_GET(self):
        file_path = self.path
        if file_path == "/":
            file_path = "/index.html"
        try:
            mimetype, send_reply = self.has_permission_to_reply(file_path)
            if send_reply:
                full_path = curdir + sep + "pages" + sep + file_path
                f = open(full_path)
                self.send_response(200)
                self.send_header('Content-type', mimetype)
                self.end_headers()
                self.wfile.write(f.read())
                f.close()
            return
        except IOError:
            self.send_error(404, 'File Not Found: %s' % file_path)

diff:
  from BaseHTTPServer import BaseHTTPRequestHandler
  import mimetypes
  from os import curdir, sep
  import os

  class HttpServerHandler(BaseHTTPRequestHandler):
-     allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
+     allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']
?                                                    ++++++++

      def has_permission_to_reply(self, file_path):
          file_name, file_extension = os.path.splitext(file_path)
          send_reply = file_extension in self.allowed_extensions
          mimetype = mimetypes.guess_type(file_name + file_extension)
          return mimetype, send_reply

      def do_GET(self):
          file_path = self.path
          if file_path == "/":
              file_path = "/index.html"
          try:
              mimetype, send_reply = self.has_permission_to_reply(file_path)
-             if True:
?                ^ -
+             if send_reply:
?                ^^^^^  +++
                  full_path = curdir + sep + "pages" + sep + file_path
                  f = open(full_path)
                  self.send_response(200)
                  self.send_header('Content-type', mimetype)
                  self.end_headers()
                  self.wfile.write(f.read())
                  f.close()
              return
          except IOError:
              self.send_error(404, 'File Not Found: %s' % file_path)

commit: b059f5128576d468ab0109da8d01bfdc50f6db56
old_file: accelerator/tests/contexts/analyze_judging_context.py
new_file: accelerator/tests/contexts/analyze_judging_context.py
old_contents:
from accelerator.tests.factories import (
    CriterionFactory,
    CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
    JudgeFeedbackContext,
)
from accelerator.models import (
    JUDGING_FEEDBACK_STATUS_COMPLETE,
    JudgeApplicationFeedback,
)


class AnalyzeJudgingContext(JudgeFeedbackContext):
    def __init__(self, type, name, read_count, options):
        super().__init__()
        self.read_count = read_count
        self.options = options
        self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
        self.feedback.save()
        self.add_application()  # Add unread app
        self.criterion = CriterionFactory(type=type,
                                          name=name,
                                          judging_round=self.judging_round)
        self.option_specs = [CriterionOptionSpecFactory(
            criterion=self.criterion,
            count=read_count,
            option=option)
            for option in options]

    def needed_reads(self):
        return (self.read_count * len(self.applications) -
                self.feedback_count())

    def feedback_count(self):
        counts = [JudgeApplicationFeedback.objects.filter(
            application=app,
            feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
            for app in self.applications]
        return sum([min(self.read_count, count)
                    for count in counts])

new_contents:
from accelerator.tests.factories import (
    CriterionFactory,
    CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
    JudgeFeedbackContext,
)
from accelerator.models import (
    JUDGING_FEEDBACK_STATUS_COMPLETE,
    JudgeApplicationFeedback,
)


class AnalyzeJudgingContext(JudgeFeedbackContext):
    def __init__(self,
                 type="reads",
                 name="reads",
                 read_count=1,
                 options=[""],
                 is_active=True):
        super().__init__(is_active=is_active)
        self.read_count = read_count
        self.options = options
        self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
        self.feedback.save()
        self.add_application()  # Add unread app
        self.criterion = CriterionFactory(type=type,
                                          name=name,
                                          judging_round=self.judging_round)
        self.option_specs = [CriterionOptionSpecFactory(
            criterion=self.criterion,
            count=read_count,
            option=option)
            for option in options]

    def needed_reads(self):
        return (self.read_count * len(self.applications) -
                self.feedback_count())

    def feedback_count(self):
        counts = [JudgeApplicationFeedback.objects.filter(
            application=app,
            feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
            for app in self.applications]
        return sum([min(self.read_count, count)
                    for count in counts])

subject: Add is_active and default args to AnalyzeJudgingContext
message: Add is_active and default args to AnalyzeJudgingContext
lang: Python
license: mit
repos: masschallenge/django-accelerator,masschallenge/django-accelerator
ndiff:
  from accelerator.tests.factories import (
      CriterionFactory,
      CriterionOptionSpecFactory,
  )
  from accelerator.tests.contexts.judge_feedback_context import (
      JudgeFeedbackContext,
  )
  from accelerator.models import (
      JUDGING_FEEDBACK_STATUS_COMPLETE,
      JudgeApplicationFeedback,
  )


  class AnalyzeJudgingContext(JudgeFeedbackContext):
-     def __init__(self, type, name, read_count, options):
-         super().__init__()
+     def __init__(self,
+                  type="reads",
+                  name="reads",
+                  read_count=1,
+                  options=[""],
+                  is_active=True):
+         super().__init__(is_active=is_active)
          self.read_count = read_count
          self.options = options
          self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
          self.feedback.save()
          self.add_application()  # Add unread app
          self.criterion = CriterionFactory(type=type,
                                            name=name,
                                            judging_round=self.judging_round)
          self.option_specs = [CriterionOptionSpecFactory(
              criterion=self.criterion,
              count=read_count,
              option=option)
              for option in options]

      def needed_reads(self):
          return (self.read_count * len(self.applications) -
                  self.feedback_count())

      def feedback_count(self):
          counts = [JudgeApplicationFeedback.objects.filter(
              application=app,
              feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
              for app in self.applications]
          return sum([min(self.read_count, count)
                      for count in counts])

instruction: Add is_active and default args to AnalyzeJudgingContext
content:
## Code Before:
from accelerator.tests.factories import (
    CriterionFactory,
    CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
    JudgeFeedbackContext,
)
from accelerator.models import (
    JUDGING_FEEDBACK_STATUS_COMPLETE,
    JudgeApplicationFeedback,
)


class AnalyzeJudgingContext(JudgeFeedbackContext):
    def __init__(self, type, name, read_count, options):
        super().__init__()
        self.read_count = read_count
        self.options = options
        self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
        self.feedback.save()
        self.add_application()  # Add unread app
        self.criterion = CriterionFactory(type=type,
                                          name=name,
                                          judging_round=self.judging_round)
        self.option_specs = [CriterionOptionSpecFactory(
            criterion=self.criterion,
            count=read_count,
            option=option)
            for option in options]

    def needed_reads(self):
        return (self.read_count * len(self.applications) -
                self.feedback_count())

    def feedback_count(self):
        counts = [JudgeApplicationFeedback.objects.filter(
            application=app,
            feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
            for app in self.applications]
        return sum([min(self.read_count, count)
                    for count in counts])
## Instruction:
Add is_active and default args to AnalyzeJudgingContext
## Code After:
from accelerator.tests.factories import (
    CriterionFactory,
    CriterionOptionSpecFactory,
)
from accelerator.tests.contexts.judge_feedback_context import (
    JudgeFeedbackContext,
)
from accelerator.models import (
    JUDGING_FEEDBACK_STATUS_COMPLETE,
    JudgeApplicationFeedback,
)


class AnalyzeJudgingContext(JudgeFeedbackContext):
    def __init__(self,
                 type="reads",
                 name="reads",
                 read_count=1,
                 options=[""],
                 is_active=True):
        super().__init__(is_active=is_active)
        self.read_count = read_count
        self.options = options
        self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
        self.feedback.save()
        self.add_application()  # Add unread app
        self.criterion = CriterionFactory(type=type,
                                          name=name,
                                          judging_round=self.judging_round)
        self.option_specs = [CriterionOptionSpecFactory(
            criterion=self.criterion,
            count=read_count,
            option=option)
            for option in options]

    def needed_reads(self):
        return (self.read_count * len(self.applications) -
                self.feedback_count())

    def feedback_count(self):
        counts = [JudgeApplicationFeedback.objects.filter(
            application=app,
            feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
            for app in self.applications]
        return sum([min(self.read_count, count)
                    for count in counts])

diff:
  from accelerator.tests.factories import (
      CriterionFactory,
      CriterionOptionSpecFactory,
  )
  from accelerator.tests.contexts.judge_feedback_context import (
      JudgeFeedbackContext,
  )
  from accelerator.models import (
      JUDGING_FEEDBACK_STATUS_COMPLETE,
      JudgeApplicationFeedback,
  )


  class AnalyzeJudgingContext(JudgeFeedbackContext):
-     def __init__(self, type, name, read_count, options):
-         super().__init__()
+     def __init__(self,
+                  type="reads",
+                  name="reads",
+                  read_count=1,
+                  options=[""],
+                  is_active=True):
+         super().__init__(is_active=is_active)
          self.read_count = read_count
          self.options = options
          self.feedback.feedback_status = JUDGING_FEEDBACK_STATUS_COMPLETE
          self.feedback.save()
          self.add_application()  # Add unread app
          self.criterion = CriterionFactory(type=type,
                                            name=name,
                                            judging_round=self.judging_round)
          self.option_specs = [CriterionOptionSpecFactory(
              criterion=self.criterion,
              count=read_count,
              option=option)
              for option in options]

      def needed_reads(self):
          return (self.read_count * len(self.applications) -
                  self.feedback_count())

      def feedback_count(self):
          counts = [JudgeApplicationFeedback.objects.filter(
              application=app,
              feedback_status=JUDGING_FEEDBACK_STATUS_COMPLETE).count()
              for app in self.applications]
          return sum([min(self.read_count, count)
                      for count in counts])

commit: db0ef5f31d82729f654d1d07fa39c1168aa5e5f7
old_file: tests/__init__.py
new_file: tests/__init__.py
old_contents:
__version__ = '$Id$'

import os

import pywikibot.data.api
from pywikibot.data.api import Request as _original_Request
from pywikibot.data.api import CachedRequest


class TestRequest(CachedRequest):
    def __init__(self, *args, **kwargs):
        super(TestRequest, self).__init__(0, *args, **kwargs)

    def _get_cache_dir(self):
        path = os.path.join(os.path.split(__file__)[0], 'apicache')
        self._make_dir(path)
        return path

    def _expired(self, dt):
        return False

    def submit(self):
        cached_available = self._load_cache()
        if not cached_available:
            print str(self)
        return super(TestRequest, self).submit()


def patch_request():
    pywikibot.data.api.Request = TestRequest


def unpatch_request():
    pywikibot.data.api.Request = _original_Request

new_contents:
__version__ = '$Id$'

import os

import pywikibot.data.api
from pywikibot.data.api import Request as _original_Request
from pywikibot.data.api import CachedRequest


class TestRequest(CachedRequest):
    def __init__(self, *args, **kwargs):
        super(TestRequest, self).__init__(0, *args, **kwargs)

    def _get_cache_dir(self):
        path = os.path.join(os.path.split(__file__)[0], 'apicache')
        self._make_dir(path)
        return path

    def _expired(self, dt):
        return False


def patch_request():
    pywikibot.data.api.Request = TestRequest


def unpatch_request():
    pywikibot.data.api.Request = _original_Request

subject: Disable printing of API parameters which could leak private info and were generally not useful
message:
Disable printing of API parameters which could leak private info and were generally not useful

Change-Id: I8a8a8d10799df4f61e4465fe60b6c7efbefbd962
lang: Python
license: mit
repos: npdoty/pywikibot,PersianWikipedia/pywikibot-core,trishnaguha/pywikibot-core,jayvdb/pywikibot-core,emijrp/pywikibot-core,wikimedia/pywikibot-core,xZise/pywikibot-core,happy5214/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,magul/pywikibot-core,h4ck3rm1k3/pywikibot-core,TridevGuha/pywikibot-core,hasteur/g13bot_tools_new,hasteur/g13bot_tools_new,npdoty/pywikibot,Darkdadaah/pywikibot-core,h4ck3rm1k3/pywikibot-core,magul/pywikibot-core,happy5214/pywikibot-core,wikimedia/pywikibot-core,Darkdadaah/pywikibot-core,valhallasw/pywikibot-core,VcamX/pywikibot-core,icyflame/batman,darthbhyrava/pywikibot-local
ndiff:
  __version__ = '$Id$'

  import os

  import pywikibot.data.api
  from pywikibot.data.api import Request as _original_Request
  from pywikibot.data.api import CachedRequest


  class TestRequest(CachedRequest):
      def __init__(self, *args, **kwargs):
          super(TestRequest, self).__init__(0, *args, **kwargs)

      def _get_cache_dir(self):
          path = os.path.join(os.path.split(__file__)[0], 'apicache')
          self._make_dir(path)
          return path

      def _expired(self, dt):
          return False

-     def submit(self):
-         cached_available = self._load_cache()
-         if not cached_available:
-             print str(self)
-         return super(TestRequest, self).submit()
-
-
  def patch_request():
      pywikibot.data.api.Request = TestRequest


  def unpatch_request():
      pywikibot.data.api.Request = _original_Request

instruction: Disable printing of API parameters which could leak private info and were generally not useful
content:
## Code Before:
__version__ = '$Id$'

import os

import pywikibot.data.api
from pywikibot.data.api import Request as _original_Request
from pywikibot.data.api import CachedRequest


class TestRequest(CachedRequest):
    def __init__(self, *args, **kwargs):
        super(TestRequest, self).__init__(0, *args, **kwargs)

    def _get_cache_dir(self):
        path = os.path.join(os.path.split(__file__)[0], 'apicache')
        self._make_dir(path)
        return path

    def _expired(self, dt):
        return False

    def submit(self):
        cached_available = self._load_cache()
        if not cached_available:
            print str(self)
        return super(TestRequest, self).submit()


def patch_request():
    pywikibot.data.api.Request = TestRequest


def unpatch_request():
    pywikibot.data.api.Request = _original_Request
## Instruction:
Disable printing of API parameters which could leak private info and were generally not useful
## Code After:
__version__ = '$Id$'

import os

import pywikibot.data.api
from pywikibot.data.api import Request as _original_Request
from pywikibot.data.api import CachedRequest


class TestRequest(CachedRequest):
    def __init__(self, *args, **kwargs):
        super(TestRequest, self).__init__(0, *args, **kwargs)

    def _get_cache_dir(self):
        path = os.path.join(os.path.split(__file__)[0], 'apicache')
        self._make_dir(path)
        return path

    def _expired(self, dt):
        return False


def patch_request():
    pywikibot.data.api.Request = TestRequest


def unpatch_request():
    pywikibot.data.api.Request = _original_Request

diff:
  __version__ = '$Id$'

  import os

  import pywikibot.data.api
  from pywikibot.data.api import Request as _original_Request
  from pywikibot.data.api import CachedRequest


  class TestRequest(CachedRequest):
      def __init__(self, *args, **kwargs):
          super(TestRequest, self).__init__(0, *args, **kwargs)

      def _get_cache_dir(self):
          path = os.path.join(os.path.split(__file__)[0], 'apicache')
          self._make_dir(path)
          return path

      def _expired(self, dt):
          return False

-     def submit(self):
-         cached_available = self._load_cache()
-         if not cached_available:
-             print str(self)
-         return super(TestRequest, self).submit()
-
-
  def patch_request():
      pywikibot.data.api.Request = TestRequest


  def unpatch_request():
      pywikibot.data.api.Request = _original_Request

commit: e0f296e776e2aaed2536eeebfb4900a23973aaf5
old_file: tests/test_json.py
new_file: tests/test_json.py
old_contents:
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)

new_contents:
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
            '*.json'
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)

subject: Add '*.json' file extensions to test pattern list.
message: Add '*.json' file extensions to test pattern list.
lang: Python
license: mit
repos: jonlabelle/SublimeJsPrettier,jonlabelle/SublimeJsPrettier
ndiff:
  from __future__ import absolute_import

  import fnmatch
  import os
  import unittest

  from . import validate_json_format


  class TestSettings(unittest.TestCase):

      def _get_json_files(self, file_pattern, folder='.'):
          for root, dirnames, filenames in os.walk(folder):
              for filename in fnmatch.filter(filenames, file_pattern):
                  yield os.path.join(root, filename)
              for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                  for f in self._get_json_files(
                          file_pattern, os.path.join(root, dirname)):
                      yield f

      def test_json_settings(self):
          """Test each JSON file."""
          file_patterns = (
              '*.sublime-settings',
              '*.sublime-commands',
              '*.sublime-menu',
+             '*.json'
          )
          for file_pattern in file_patterns:
              for f in self._get_json_files(file_pattern):
                  print(f)
                  self.assertFalse(
                      validate_json_format.CheckJsonFormat(
                          False, True).check_format(f),
                      "%s does not comform to expected format!" % f)

instruction: Add '*.json' file extensions to test pattern list.
content:
## Code Before:
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)
## Instruction:
Add '*.json' file extensions to test pattern list.
## Code After:
from __future__ import absolute_import

import fnmatch
import os
import unittest

from . import validate_json_format


class TestSettings(unittest.TestCase):

    def _get_json_files(self, file_pattern, folder='.'):
        for root, dirnames, filenames in os.walk(folder):
            for filename in fnmatch.filter(filenames, file_pattern):
                yield os.path.join(root, filename)
            for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                for f in self._get_json_files(
                        file_pattern, os.path.join(root, dirname)):
                    yield f

    def test_json_settings(self):
        """Test each JSON file."""
        file_patterns = (
            '*.sublime-settings',
            '*.sublime-commands',
            '*.sublime-menu',
            '*.json'
        )
        for file_pattern in file_patterns:
            for f in self._get_json_files(file_pattern):
                print(f)
                self.assertFalse(
                    validate_json_format.CheckJsonFormat(
                        False, True).check_format(f),
                    "%s does not comform to expected format!" % f)

diff:
  from __future__ import absolute_import

  import fnmatch
  import os
  import unittest

  from . import validate_json_format


  class TestSettings(unittest.TestCase):

      def _get_json_files(self, file_pattern, folder='.'):
          for root, dirnames, filenames in os.walk(folder):
              for filename in fnmatch.filter(filenames, file_pattern):
                  yield os.path.join(root, filename)
              for dirname in [d for d in dirnames if d not in ('.git', '.tox')]:
                  for f in self._get_json_files(
                          file_pattern, os.path.join(root, dirname)):
                      yield f

      def test_json_settings(self):
          """Test each JSON file."""
          file_patterns = (
              '*.sublime-settings',
              '*.sublime-commands',
              '*.sublime-menu',
+             '*.json'
          )
          for file_pattern in file_patterns:
              for f in self._get_json_files(file_pattern):
                  print(f)
                  self.assertFalse(
                      validate_json_format.CheckJsonFormat(
                          False, True).check_format(f),
                      "%s does not comform to expected format!" % f)

commit: 48213f561c802e5279770cc833a9a5a68575bf72
old_file: inventory.py
new_file: inventory.py
old_contents:
from flask import Flask, render_template, url_for, redirect
from flask import session, escape, request
from peewee import *
#from datetime import date

app = Flask(__name__)

# http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
database = SqliteDatabase('developmentData.db')

#class Device(Model):
#    idNumber = IntField()
#    serialNumber = CharField()
#    typeCategory = CharField()
#    description = TextField()
#    issues = TextField()
#    photo = CharField()
#    quality = CharField()

@app.route('/')
def index():
    # http://flask.pocoo.org/snippets/15/
    if 'username' in session:
        return render_template('inventory.html', inventoryData="", deviceLogData="")
    return redirect(url_for('login'));

@app.route('/login', methods=['GET', 'POST'])
def login():
    if request.method == 'POST':
        session['username'] = request.form['username']
        return redirect(url_for('index'))
    return render_template('login.html')

if __name__ == '__main__':
    db.connect()
    app.run()

new_contents:
from flask import Flask, render_template, url_for, redirect
from flask import session, escape, request
from peewee import *
#from datetime import date

app = Flask(__name__)

# http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
database = SqliteDatabase('developmentData.db')

#class Device(Model):
#    idNumber = IntField()
#    serialNumber = CharField()
#    typeCategory = CharField()
#    description = TextField()
#    issues = TextField()
#    photo = CharField()
#    quality = CharField()

@app.route('/')
def index():
    # http://flask.pocoo.org/snippets/15/
    if 'username' in session:
        return render_template('inventory.html', inventoryData="", deviceLogData="")
    return redirect(url_for('login'));

@app.route('/login', methods=['GET', 'POST'])
def login():
    if request.method == 'POST':
        #session['username'] = request.form['username']
        return "success"#redirect(url_for('index'))
    return render_template('login.html')

if __name__ == '__main__':
    db.connect()
    app.run()

subject: Add debug code to test login post
message: Add debug code to test login post
lang: Python
license: mit
repos: lcdi/Inventory,lcdi/Inventory,lcdi/Inventory,lcdi/Inventory
ndiff:
  from flask import Flask, render_template, url_for, redirect
  from flask import session, escape, request
  from peewee import *
  #from datetime import date

  app = Flask(__name__)

  # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
  database = SqliteDatabase('developmentData.db')

  #class Device(Model):
  #    idNumber = IntField()
  #    serialNumber = CharField()
  #    typeCategory = CharField()
  #    description = TextField()
  #    issues = TextField()
  #    photo = CharField()
  #    quality = CharField()

  @app.route('/')
  def index():
      # http://flask.pocoo.org/snippets/15/
      if 'username' in session:
          return render_template('inventory.html', inventoryData="", deviceLogData="")
      return redirect(url_for('login'));

  @app.route('/login', methods=['GET', 'POST'])
  def login():
      if request.method == 'POST':
-         session['username'] = request.form['username']
+         #session['username'] = request.form['username']
-         return redirect(url_for('index'))
+         return "success"#redirect(url_for('index'))
      return render_template('login.html')

  if __name__ == '__main__':
      db.connect()
      app.run()

instruction: Add debug code to test login post
content:
## Code Before:
from flask import Flask, render_template, url_for, redirect
from flask import session, escape, request
from peewee import *
#from datetime import date

app = Flask(__name__)

# http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
database = SqliteDatabase('developmentData.db')

#class Device(Model):
#    idNumber = IntField()
#    serialNumber = CharField()
#    typeCategory = CharField()
#    description = TextField()
#    issues = TextField()
#    photo = CharField()
#    quality = CharField()

@app.route('/')
def index():
    # http://flask.pocoo.org/snippets/15/
    if 'username' in session:
        return render_template('inventory.html', inventoryData="", deviceLogData="")
    return redirect(url_for('login'));

@app.route('/login', methods=['GET', 'POST'])
def login():
    if request.method == 'POST':
        session['username'] = request.form['username']
        return redirect(url_for('index'))
    return render_template('login.html')

if __name__ == '__main__':
    db.connect()
    app.run()
## Instruction:
Add debug code to test login post
## Code After:
from flask import Flask, render_template, url_for, redirect
from flask import session, escape, request
from peewee import *
#from datetime import date

app = Flask(__name__)

# http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
database = SqliteDatabase('developmentData.db')

#class Device(Model):
#    idNumber = IntField()
#    serialNumber = CharField()
#    typeCategory = CharField()
#    description = TextField()
#    issues = TextField()
#    photo = CharField()
#    quality = CharField()

@app.route('/')
def index():
    # http://flask.pocoo.org/snippets/15/
    if 'username' in session:
        return render_template('inventory.html', inventoryData="", deviceLogData="")
    return redirect(url_for('login'));

@app.route('/login', methods=['GET', 'POST'])
def login():
    if request.method == 'POST':
        #session['username'] = request.form['username']
        return "success"#redirect(url_for('index'))
    return render_template('login.html')

if __name__ == '__main__':
    db.connect()
    app.run()

diff:
  from flask import Flask, render_template, url_for, redirect
  from flask import session, escape, request
  from peewee import *
  #from datetime import date

  app = Flask(__name__)

  # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
  database = SqliteDatabase('developmentData.db')

  #class Device(Model):
  #    idNumber = IntField()
  #    serialNumber = CharField()
  #    typeCategory = CharField()
  #    description = TextField()
  #    issues = TextField()
  #    photo = CharField()
  #    quality = CharField()

  @app.route('/')
  def index():
      # http://flask.pocoo.org/snippets/15/
      if 'username' in session:
          return render_template('inventory.html', inventoryData="", deviceLogData="")
      return redirect(url_for('login'));

  @app.route('/login', methods=['GET', 'POST'])
  def login():
      if request.method == 'POST':
-         session['username'] = request.form['username']
+         #session['username'] = request.form['username']
?         +
-         return redirect(url_for('index'))
+         return "success"#redirect(url_for('index'))
?                ++++++++++
      return render_template('login.html')

  if __name__ == '__main__':
      db.connect()
      app.run()

commit: e049017d8abfdee80a0d825af996cb5de2d63657
old_file: commands/seen.py
new_file: commands/seen.py
old_contents:
@command("seen")
def seen(nick,user,channel,message):
    with db as conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
            row = cursor.fetchone()
            if row == None:
                say(channel, "No record of {}".format(message))
            else:
                time = row[0]
                nick = row[1]
                msg = row[2]
                chan = row[3]
                say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #

new_contents:
@command("seen")
def seen(nick,user,channel,message):
    if db == None: return
    with db as conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
            row = cursor.fetchone()
            if row == None:
                say(channel, "No record of {}".format(message))
            else:
                time = row[0]
                nick = row[1]
                msg = row[2]
                chan = row[3]
                say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #

subject: Handle case where db is not connected
message: Handle case where db is not connected
lang: Python
license: unlicense
repos: ccowmu/botler
ndiff:
  @command("seen")
  def seen(nick,user,channel,message):
+     if db == None: return
      with db as conn:
          with conn.cursor() as cursor:
              cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
              row = cursor.fetchone()
              if row == None:
                  say(channel, "No record of {}".format(message))
              else:
                  time = row[0]
                  nick = row[1]
                  msg = row[2]
                  chan = row[3]
                  say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
  # 3 (minutes ago) on 4, 1, 2 ,0 #

instruction: Handle case where db is not connected
content:
## Code Before:
@command("seen")
def seen(nick,user,channel,message):
    with db as conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
            row = cursor.fetchone()
            if row == None:
                say(channel, "No record of {}".format(message))
            else:
                time = row[0]
                nick = row[1]
                msg = row[2]
                chan = row[3]
                say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #
## Instruction:
Handle case where db is not connected
## Code After:
@command("seen")
def seen(nick,user,channel,message):
    if db == None: return
    with db as conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
            row = cursor.fetchone()
            if row == None:
                say(channel, "No record of {}".format(message))
            else:
                time = row[0]
                nick = row[1]
                msg = row[2]
                chan = row[3]
                say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
# 3 (minutes ago) on 4, 1, 2 ,0 #

diff:
  @command("seen")
  def seen(nick,user,channel,message):
+     if db == None: return
      with db as conn:
          with conn.cursor() as cursor:
              cursor.execute("SELECT time, nick, message, channel from log where nick = %s order by time desc limit 1;", (message,))
              row = cursor.fetchone()
              if row == None:
                  say(channel, "No record of {}".format(message))
              else:
                  time = row[0]
                  nick = row[1]
                  msg = row[2]
                  chan = row[3]
                  say(channel, '{} was last seen on {} saying "{}" in {}'.format(nick, time, msg, chan))
  # 3 (minutes ago) on 4, 1, 2 ,0 #

commit: 72068701db46dc3d66cde295187b7d167cbfd880
old_file: gather/account/api.py
new_file: gather/account/api.py
old_contents:
from flask import g, jsonify

from gather.account.models import Account
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager

__all__ = ["bp"]


def patch_single_preprocessor(instance_id=None, data=None, **kw):
    """Accepts two arguments, `instance_id`, the primary key of the
    instance of the model to patch, and `data`, the dictionary of fields
    to change on the instance.
    """
    return g.token_user.id == instance_id

# 需要一点小 hack ..
bp = api_manager.create_api_blueprint(
    Account,
    methods=["GET", "PUT"],
    preprocessors=dict(PUT_SINGLE=[need_auth, patch_single_preprocessor],),
    exclude_columns=EXCLUDE_COLUMNS
)


@bp.route("/account/authorize/", methods=["POST"])
def _account_authorize():
    from .forms import LoginForm
    form = LoginForm()
    if not form.validate_on_submit():
        return jsonify(
            code=400,
            msg="Wrong username/password"
        )
    user = form.user
    if not user.api_token:
        user.generate_api_token()
    return jsonify(
        code=200,
        token=user.api_token
    )

new_contents:
from flask import g, jsonify, request

from gather.account.models import Account
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager

__all__ = ["bp"]


def patch_single_preprocessor(instance_id=None, data=None, **kw):
    """Accepts two arguments, `instance_id`, the primary key of the
    instance of the model to patch, and `data`, the dictionary of fields
    to change on the instance.
    """
    return g.token_user.id == instance_id

# 需要一点小 hack ..
bp = api_manager.create_api_blueprint(
    Account,
    methods=["GET", "PUT"],
    preprocessors=dict(PUT_SINGLE=[need_auth, patch_single_preprocessor],),
    exclude_columns=EXCLUDE_COLUMNS
)


@bp.route("/account/authorize/", methods=["POST"])
def _account_authorize():
    from .forms import LoginForm
    form = LoginForm()
    if not form.validate_on_submit():
        return jsonify(
            code=400,
            msg="Wrong username/password"
        )
    user = form.user
    if not user.api_token:
        user.generate_api_token()
    return jsonify(
        code=200,
        token=user.api_token
    )


@bp.route("/account/change_password/", methods=["POST"])
def _change_password():
    new_password = request.form["password"]
    user = Account.query.filter_by(username="Madimo").first_or_404()
    user.change_password(new_password)
    user.save
    return jsonify(
        code=200,
        user=user
    )

subject: Add API to change password
message: Add API to change password
lang: Python
license: mit
repos: whtsky/Gather,whtsky/Gather
ndiff:
- from flask import g, jsonify
+ from flask import g, jsonify, request

  from gather.account.models import Account
  from gather.api import need_auth, EXCLUDE_COLUMNS
  from gather.extensions import api_manager

  __all__ = ["bp"]


  def patch_single_preprocessor(instance_id=None, data=None, **kw):
      """Accepts two arguments, `instance_id`, the primary key of the
      instance of the model to patch, and `data`, the dictionary of fields
      to change on the instance.
      """
      return g.token_user.id == instance_id

  # 需要一点小 hack ..
  bp = api_manager.create_api_blueprint(
      Account,
      methods=["GET", "PUT"],
      preprocessors=dict(PUT_SINGLE=[need_auth, patch_single_preprocessor],),
      exclude_columns=EXCLUDE_COLUMNS
  )


  @bp.route("/account/authorize/", methods=["POST"])
  def _account_authorize():
      from .forms import LoginForm
      form = LoginForm()
      if not form.validate_on_submit():
          return jsonify(
              code=400,
              msg="Wrong username/password"
          )
      user = form.user
      if not user.api_token:
          user.generate_api_token()
      return jsonify(
          code=200,
          token=user.api_token
      )
+
+
+ @bp.route("/account/change_password/", methods=["POST"])
+ def _change_password():
+     new_password = request.form["password"]
+     user = Account.query.filter_by(username="Madimo").first_or_404()
+     user.change_password(new_password)
+     user.save
+     return jsonify(
+         code=200,
+         user=user
+     )
+

instruction: Add API to change password
content:
## Code Before:
from flask import g, jsonify

from gather.account.models import Account
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager

__all__ = ["bp"]


def patch_single_preprocessor(instance_id=None, data=None, **kw):
    """Accepts two arguments, `instance_id`, the primary key of the
    instance of the model to patch, and `data`, the dictionary of fields
    to change on the instance.
    """
    return g.token_user.id == instance_id

# 需要一点小 hack ..
bp = api_manager.create_api_blueprint(
    Account,
    methods=["GET", "PUT"],
    preprocessors=dict(PUT_SINGLE=[need_auth, patch_single_preprocessor],),
    exclude_columns=EXCLUDE_COLUMNS
)


@bp.route("/account/authorize/", methods=["POST"])
def _account_authorize():
    from .forms import LoginForm
    form = LoginForm()
    if not form.validate_on_submit():
        return jsonify(
            code=400,
            msg="Wrong username/password"
        )
    user = form.user
    if not user.api_token:
        user.generate_api_token()
    return jsonify(
        code=200,
        token=user.api_token
    )
## Instruction:
Add API to change password
## Code After:
from flask import g, jsonify, request

from gather.account.models import Account
from gather.api import need_auth, EXCLUDE_COLUMNS
from gather.extensions import api_manager

__all__ = ["bp"]


def patch_single_preprocessor(instance_id=None, data=None, **kw):
    """Accepts two arguments, `instance_id`, the primary key of the
    instance of the model to patch, and `data`, the dictionary of fields
    to change on the instance.
    """
    return g.token_user.id == instance_id

# 需要一点小 hack ..
bp = api_manager.create_api_blueprint(
    Account,
    methods=["GET", "PUT"],
    preprocessors=dict(PUT_SINGLE=[need_auth, patch_single_preprocessor],),
    exclude_columns=EXCLUDE_COLUMNS
)


@bp.route("/account/authorize/", methods=["POST"])
def _account_authorize():
    from .forms import LoginForm
    form = LoginForm()
    if not form.validate_on_submit():
        return jsonify(
            code=400,
            msg="Wrong username/password"
        )
    user = form.user
    if not user.api_token:
        user.generate_api_token()
    return jsonify(
        code=200,
        token=user.api_token
    )


@bp.route("/account/change_password/", methods=["POST"])
def _change_password():
    new_password = request.form["password"]
    user = Account.query.filter_by(username="Madimo").first_or_404()
    user.change_password(new_password)
    user.save
    return jsonify(
        code=200,
        user=user
    )
- from flask import g, jsonify + from flask import g, jsonify, request ? +++++++++ from gather.account.models import Account from gather.api import need_auth, EXCLUDE_COLUMNS from gather.extensions import api_manager __all__ = ["bp"] def patch_single_preprocessor(instance_id=None, data=None, **kw): """Accepts two arguments, `instance_id`, the primary key of the instance of the model to patch, and `data`, the dictionary of fields to change on the instance. """ return g.token_user.id == instance_id # 需要一点小 hack .. bp = api_manager.create_api_blueprint( Account, methods=["GET", "PUT"], preprocessors=dict(PUT_SINGLE=[need_auth, patch_single_preprocessor],), exclude_columns=EXCLUDE_COLUMNS ) @bp.route("/account/authorize/", methods=["POST"]) def _account_authorize(): from .forms import LoginForm form = LoginForm() if not form.validate_on_submit(): return jsonify( code=400, msg="Wrong username/password" ) user = form.user if not user.api_token: user.generate_api_token() return jsonify( code=200, token=user.api_token ) + + + @bp.route("/account/change_password/", methods=["POST"]) + def _change_password(): + new_password = request.form["password"] + user = Account.query.filter_by(username="Madimo").first_or_404() + user.change_password(new_password) + user.save + return jsonify( + code=200, + user=user + )
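In the record above, the inline comment 需要一点小 hack .. roughly translates to "a small hack is needed here", flagging the blueprint registration. Two things stand out when reading the diff: the new endpoint hard-codes the username Madimo, and it references user.save without parentheses, which in Python merely looks up the bound method and so appears to leave the password change unpersisted unless change_password commits internally. A minimal client-side sketch of calling the endpoint with the requests library; the base URL and the password value are illustrative assumptions, not taken from the record:

import requests

# both the base URL and the form payload are assumed values for illustration
resp = requests.post('http://localhost:5000/account/change_password/',
                     data={'password': 'new-secret'})
print(resp.status_code)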
b0814b95ea854f7b3f0b9db48ae9beee078c2a30
versions/software/openjdk.py
versions/software/openjdk.py
import re from versions.software.utils import get_command_stderr, get_soup, \ get_text_between def name(): """Return the precise name for the software.""" return 'Zulu OpenJDK' def installed_version(): """Return the installed version of the jdk, or None if not installed.""" try: version_string = get_command_stderr(('java', '-version')) return get_text_between(version_string, '"', '"') except FileNotFoundError: pass def downloadable_version(url): """Strip the version out of the Zulu OpenJDK manual download link.""" # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip filename = url[url.rfind('/') + 1:] jdk_version = get_text_between(filename, '-jdk', '-') version, update = jdk_version.rsplit('.', 1) return f'1.{version}_{update}' def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: div = soup.find('div', class_='latest_area') if div: zip_filename = re.compile('\.zip$') tag = div.find('a', class_='r-download', href=zip_filename) if tag: return downloadable_version(tag.attrs['href']) return 'Unknown'
import re from versions.software.utils import get_command_stderr, get_soup, \ get_text_between def name(): """Return the precise name for the software.""" return 'Zulu OpenJDK' def installed_version(): """Return the installed version of the jdk, or None if not installed.""" try: version_string = get_command_stderr(('java', '-version')) # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') except FileNotFoundError: pass def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: zip_filename = re.compile('\.zip$') for tag in soup.find_all('a', class_='r-download', href=zip_filename): filename = tag.attrs['href'] zulu = get_text_between(filename, 'bin/zulu', '-') jdk = get_text_between(filename, 'jdk', '-') if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): return zulu elif installed[0] == '1' and jdk[0] == installed[2]: version, update = jdk.rsplit('.', 1) return f'1.{version}_{update}' return 'Unknown'
Update OpenJDK version to support both 8 and 9.
Update OpenJDK version to support both 8 and 9.
Python
mit
mchung94/latest-versions
import re from versions.software.utils import get_command_stderr, get_soup, \ get_text_between def name(): """Return the precise name for the software.""" return 'Zulu OpenJDK' def installed_version(): """Return the installed version of the jdk, or None if not installed.""" try: version_string = get_command_stderr(('java', '-version')) + # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') except FileNotFoundError: pass - def downloadable_version(url): - """Strip the version out of the Zulu OpenJDK manual download link.""" - # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip - filename = url[url.rfind('/') + 1:] - jdk_version = get_text_between(filename, '-jdk', '-') - version, update = jdk_version.rsplit('.', 1) - return f'1.{version}_{update}' - - def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" + installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: - div = soup.find('div', class_='latest_area') - if div: - zip_filename = re.compile('\.zip$') + zip_filename = re.compile('\.zip$') - tag = div.find('a', class_='r-download', href=zip_filename) + for tag in soup.find_all('a', class_='r-download', href=zip_filename): - if tag: - return downloadable_version(tag.attrs['href']) + filename = tag.attrs['href'] + zulu = get_text_between(filename, 'bin/zulu', '-') + jdk = get_text_between(filename, 'jdk', '-') + if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): + return zulu + elif installed[0] == '1' and jdk[0] == installed[2]: + version, update = jdk.rsplit('.', 1) + return f'1.{version}_{update}' return 'Unknown'
Update OpenJDK version to support both 8 and 9.
## Code Before: import re from versions.software.utils import get_command_stderr, get_soup, \ get_text_between def name(): """Return the precise name for the software.""" return 'Zulu OpenJDK' def installed_version(): """Return the installed version of the jdk, or None if not installed.""" try: version_string = get_command_stderr(('java', '-version')) return get_text_between(version_string, '"', '"') except FileNotFoundError: pass def downloadable_version(url): """Strip the version out of the Zulu OpenJDK manual download link.""" # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip filename = url[url.rfind('/') + 1:] jdk_version = get_text_between(filename, '-jdk', '-') version, update = jdk_version.rsplit('.', 1) return f'1.{version}_{update}' def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: div = soup.find('div', class_='latest_area') if div: zip_filename = re.compile('\.zip$') tag = div.find('a', class_='r-download', href=zip_filename) if tag: return downloadable_version(tag.attrs['href']) return 'Unknown' ## Instruction: Update OpenJDK version to support both 8 and 9. ## Code After: import re from versions.software.utils import get_command_stderr, get_soup, \ get_text_between def name(): """Return the precise name for the software.""" return 'Zulu OpenJDK' def installed_version(): """Return the installed version of the jdk, or None if not installed.""" try: version_string = get_command_stderr(('java', '-version')) # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') except FileNotFoundError: pass def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: zip_filename = re.compile('\.zip$') for tag in soup.find_all('a', class_='r-download', href=zip_filename): filename = tag.attrs['href'] zulu = get_text_between(filename, 'bin/zulu', '-') jdk = get_text_between(filename, 'jdk', '-') if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): return zulu elif installed[0] == '1' and jdk[0] == installed[2]: version, update = jdk.rsplit('.', 1) return f'1.{version}_{update}' return 'Unknown'
import re from versions.software.utils import get_command_stderr, get_soup, \ get_text_between def name(): """Return the precise name for the software.""" return 'Zulu OpenJDK' def installed_version(): """Return the installed version of the jdk, or None if not installed.""" try: version_string = get_command_stderr(('java', '-version')) + # "1.8.0_162" or "9.0.4.1" for example return get_text_between(version_string, '"', '"') except FileNotFoundError: pass - def downloadable_version(url): - """Strip the version out of the Zulu OpenJDK manual download link.""" - # example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip - filename = url[url.rfind('/') + 1:] - jdk_version = get_text_between(filename, '-jdk', '-') - version, update = jdk_version.rsplit('.', 1) - return f'1.{version}_{update}' - - def latest_version(): """Return the latest version of Zulu OpenJDK available for download.""" + installed = installed_version() soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/') if soup: - div = soup.find('div', class_='latest_area') - if div: - zip_filename = re.compile('\.zip$') ? ---- + zip_filename = re.compile('\.zip$') - tag = div.find('a', class_='r-download', href=zip_filename) ? ^^^ ^ ^^^ + for tag in soup.find_all('a', class_='r-download', href=zip_filename): ? ^^^ ^^ ^^^^ ++++ + - if tag: - return downloadable_version(tag.attrs['href']) + filename = tag.attrs['href'] + zulu = get_text_between(filename, 'bin/zulu', '-') + jdk = get_text_between(filename, 'jdk', '-') + if (installed is None) or (installed[0] == '9' and zulu[0] == '9'): + return zulu + elif installed[0] == '1' and jdk[0] == installed[2]: + version, update = jdk.rsplit('.', 1) + return f'1.{version}_{update}' return 'Unknown'
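The version arithmetic in the record above is easiest to see on the sample filename quoted in the old docstring, zulu8.23.0.3-jdk8.0.144-win_x64.zip. The helper below is a local stand-in for versions.software.utils.get_text_between, with behaviour inferred from its call sites rather than from the library itself:

def text_between(s, start, end):
    # substring between the first occurrence of start and the
    # next occurrence of end after it
    i = s.find(start) + len(start)
    return s[i:s.find(end, i)]

filename = 'zulu8.23.0.3-jdk8.0.144-win_x64.zip'
jdk = text_between(filename, 'jdk', '-')   # '8.0.144'
version, update = jdk.rsplit('.', 1)
print(f'1.{version}_{update}')             # 1.8.0_144, the Java 8 reporting style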
b974bbcc7e243fca7c3dc63fbbaf530fe9b69e50
runtests.py
runtests.py
import sys try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", } }, INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", "django.contrib.sites", "chartit", "chartit_tests", ], SITE_ID=1, MIDDLEWARE_CLASSES=(), STATIC_URL='/static/' ) try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] # Run tests TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == "__main__": run_tests(*sys.argv[1:])
import os import sys try: sys.path.append('demoproject') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") from django.conf import settings from django.core.management import call_command settings.DATABASES['default']['NAME'] = ':memory:' settings.INSTALLED_APPS.append('chartit_tests') try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] # ./manage.py test takes care of database creation and # application of migrations if any result = call_command('test', *test_args, verbosity=2, failfast=True) sys.exit(result) if __name__ == "__main__": run_tests(*sys.argv[1:])
Load DB migrations before testing and use verbose=2 and failfast
Load DB migrations before testing and use verbose=2 and failfast Note that we use `manage.py test` instead of `manage.py migrate` and manually running the tests. This lets Django take care of applying migrations before running tests. This works around https://code.djangoproject.com/ticket/22487 which causes a test failure on Django 1.8.14. In 1.8.14 somehow we end up without any actual data in the test DB and one of the tests fails if we use `manage.py migrate` and run the tests manually via TestRunner.
Python
bsd-2-clause
pgollakota/django-chartit,pgollakota/django-chartit,pgollakota/django-chartit
+ import os import sys try: + sys.path.append('demoproject') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") + from django.conf import settings - from django.test.utils import get_runner + from django.core.management import call_command + settings.DATABASES['default']['NAME'] = ':memory:' + settings.INSTALLED_APPS.append('chartit_tests') - settings.configure( - DEBUG=True, - USE_TZ=True, - DATABASES={ - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": ":memory:", - } - }, - INSTALLED_APPS=[ - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.staticfiles", - "django.contrib.sites", - "chartit", - "chartit_tests", - ], - SITE_ID=1, - MIDDLEWARE_CLASSES=(), - STATIC_URL='/static/' - ) try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] + # ./manage.py test takes care of database creation and + # application of migrations if any + result = call_command('test', *test_args, verbosity=2, failfast=True) + sys.exit(result) - # Run tests - TestRunner = get_runner(settings) - test_runner = TestRunner() - - failures = test_runner.run_tests(test_args) - - if failures: - sys.exit(bool(failures)) if __name__ == "__main__": run_tests(*sys.argv[1:])
Load DB migrations before testing and use verbose=2 and failfast
## Code Before: import sys try: from django.conf import settings from django.test.utils import get_runner settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:", } }, INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.staticfiles", "django.contrib.sites", "chartit", "chartit_tests", ], SITE_ID=1, MIDDLEWARE_CLASSES=(), STATIC_URL='/static/' ) try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] # Run tests TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(test_args) if failures: sys.exit(bool(failures)) if __name__ == "__main__": run_tests(*sys.argv[1:]) ## Instruction: Load DB migrations before testing and use verbose=2 and failfast ## Code After: import os import sys try: sys.path.append('demoproject') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") from django.conf import settings from django.core.management import call_command settings.DATABASES['default']['NAME'] = ':memory:' settings.INSTALLED_APPS.append('chartit_tests') try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] # ./manage.py test takes care of database creation and # application of migrations if any result = call_command('test', *test_args, verbosity=2, failfast=True) sys.exit(result) if __name__ == "__main__": run_tests(*sys.argv[1:])
+ import os import sys try: + sys.path.append('demoproject') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "demoproject.settings") + from django.conf import settings - from django.test.utils import get_runner + from django.core.management import call_command + settings.DATABASES['default']['NAME'] = ':memory:' + settings.INSTALLED_APPS.append('chartit_tests') - settings.configure( - DEBUG=True, - USE_TZ=True, - DATABASES={ - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": ":memory:", - } - }, - INSTALLED_APPS=[ - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.staticfiles", - "django.contrib.sites", - "chartit", - "chartit_tests", - ], - SITE_ID=1, - MIDDLEWARE_CLASSES=(), - STATIC_URL='/static/' - ) try: import django setup = django.setup except AttributeError: pass else: setup() except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements.txt") def run_tests(*test_args): if not test_args: test_args = ["chartit_tests"] + # ./manage.py test takes care of database creation and + # application of migrations if any + result = call_command('test', *test_args, verbosity=2, failfast=True) + sys.exit(result) - # Run tests - TestRunner = get_runner(settings) - test_runner = TestRunner() - - failures = test_runner.run_tests(test_args) - - if failures: - sys.exit(bool(failures)) if __name__ == "__main__": run_tests(*sys.argv[1:])
45542f012b3dc6d089bc991529523a4ea6401b35
br_rss/boilerroomtv/management/commands/scrape_all.py
br_rss/boilerroomtv/management/commands/scrape_all.py
from django.core.management.base import BaseCommand from boilerroomtv.tasks import scrape_genres, scrape_all_recordings, scrape_channels class Command(BaseCommand): def handle(self, *args, **options): scrape_genres() scrape_all_recordings() scrape_channels()
from django.core.management.base import BaseCommand from ...tasks import scrape_genres, scrape_all_recordings, scrape_channels class Command(BaseCommand): def handle(self, *args, **options): scrape_genres() scrape_all_recordings() scrape_channels()
Use relative import in command.
Use relative import in command.
Python
mpl-2.0
jeffbr13/br-rss,jeffbr13/br-rss
from django.core.management.base import BaseCommand - from boilerroomtv.tasks import scrape_genres, scrape_all_recordings, scrape_channels + from ...tasks import scrape_genres, scrape_all_recordings, scrape_channels class Command(BaseCommand): def handle(self, *args, **options): scrape_genres() scrape_all_recordings() scrape_channels()
Use relative import in command.
## Code Before: from django.core.management.base import BaseCommand from boilerroomtv.tasks import scrape_genres, scrape_all_recordings, scrape_channels class Command(BaseCommand): def handle(self, *args, **options): scrape_genres() scrape_all_recordings() scrape_channels() ## Instruction: Use relative import in command. ## Code After: from django.core.management.base import BaseCommand from ...tasks import scrape_genres, scrape_all_recordings, scrape_channels class Command(BaseCommand): def handle(self, *args, **options): scrape_genres() scrape_all_recordings() scrape_channels()
from django.core.management.base import BaseCommand - from boilerroomtv.tasks import scrape_genres, scrape_all_recordings, scrape_channels ? ^^^^^^^^^^^^ + from ...tasks import scrape_genres, scrape_all_recordings, scrape_channels ? ^^ class Command(BaseCommand): def handle(self, *args, **options): scrape_genres() scrape_all_recordings() scrape_channels()
409722f0e075385e05a77513f6dbd9c3b540bfac
txpoloniex/const.py
txpoloniex/const.py
PUBLIC_API = 'https://poloniex.com/public' PRIVATE_API = 'https://poloniex.com/tradingApi' PUBLIC_COMMANDS = [ 'returnTicker', 'return24hVolume', 'returnOrderBook', 'returnTradeHistory', 'returnChartData', 'returnCurrencies', 'returnLoanOrders', ] PRIVATE_COMMANDS = [ 'returnBalances', 'returnCompleteBalances', 'returnDepositAddresses', 'generateNewAddress', 'returnDepositsWithdrawals', 'returnOpenOrders', 'returnTradeHistory', 'returnAvailableAccountBalances', 'returnTradableBalances', 'returnOpenLoanOffers', 'returnOrderTrades', 'returnActiveLoans', 'returnLendingHistory', 'createLoanOffer', 'cancelLoanOffer', 'toggleAutoRenew', 'buy', 'sell', 'cancelOrder', 'moveOrder', 'withdraw', 'returnFeeInfo', 'transferBalance', 'returnMarginAccountSummary', 'marginBuy', 'marginSell', 'getMarginPosition', 'closeMarginPosition', ]
PUBLIC_API = 'https://poloniex.com/public' PRIVATE_API = 'https://poloniex.com/tradingApi' PUBLIC_COMMANDS = [ 'returnTicker', 'return24hVolume', 'returnOrderBook', 'returnTradeHistory', 'returnChartData', 'returnCurrencies', 'returnLoanOrders', ] PRIVATE_COMMANDS = [ 'returnBalances', 'returnCompleteBalances', 'returnDepositAddresses', 'generateNewAddress', 'returnDepositsWithdrawals', 'returnOpenOrders', 'returnTradeHistory', 'returnAvailableAccountBalances', 'returnTradableBalances', 'returnOpenLoanOffers', 'returnOrderTrades', 'returnActiveLoans', 'returnLendingHistory', 'createLoanOffer', 'cancelLoanOffer', 'toggleAutoRenew', 'buy', 'sell', 'cancelOrder', 'moveOrder', 'withdraw', 'returnFeeInfo', 'transferBalance', 'returnMarginAccountSummary', 'marginBuy', 'marginSell', 'getMarginPosition', 'closeMarginPosition', ] DATE_FORMAT='%Y-%m-%d %H:%M:%S'
Add DATE_FORMAT for parsing any datetime strings
Add DATE_FORMAT for parsing any datetime strings Poloniex seems to use a fixed output format for datetime strings
Python
apache-2.0
congruency/txpoloniex
PUBLIC_API = 'https://poloniex.com/public' PRIVATE_API = 'https://poloniex.com/tradingApi' PUBLIC_COMMANDS = [ 'returnTicker', 'return24hVolume', 'returnOrderBook', 'returnTradeHistory', 'returnChartData', 'returnCurrencies', 'returnLoanOrders', ] PRIVATE_COMMANDS = [ 'returnBalances', 'returnCompleteBalances', 'returnDepositAddresses', 'generateNewAddress', 'returnDepositsWithdrawals', 'returnOpenOrders', 'returnTradeHistory', 'returnAvailableAccountBalances', 'returnTradableBalances', 'returnOpenLoanOffers', 'returnOrderTrades', 'returnActiveLoans', 'returnLendingHistory', 'createLoanOffer', 'cancelLoanOffer', 'toggleAutoRenew', 'buy', 'sell', 'cancelOrder', 'moveOrder', 'withdraw', 'returnFeeInfo', 'transferBalance', 'returnMarginAccountSummary', 'marginBuy', 'marginSell', 'getMarginPosition', 'closeMarginPosition', ] + DATE_FORMAT='%Y-%m-%d %H:%M:%S' +
Add DATE_FORMAT for parsing any datetime strings
## Code Before: PUBLIC_API = 'https://poloniex.com/public' PRIVATE_API = 'https://poloniex.com/tradingApi' PUBLIC_COMMANDS = [ 'returnTicker', 'return24hVolume', 'returnOrderBook', 'returnTradeHistory', 'returnChartData', 'returnCurrencies', 'returnLoanOrders', ] PRIVATE_COMMANDS = [ 'returnBalances', 'returnCompleteBalances', 'returnDepositAddresses', 'generateNewAddress', 'returnDepositsWithdrawals', 'returnOpenOrders', 'returnTradeHistory', 'returnAvailableAccountBalances', 'returnTradableBalances', 'returnOpenLoanOffers', 'returnOrderTrades', 'returnActiveLoans', 'returnLendingHistory', 'createLoanOffer', 'cancelLoanOffer', 'toggleAutoRenew', 'buy', 'sell', 'cancelOrder', 'moveOrder', 'withdraw', 'returnFeeInfo', 'transferBalance', 'returnMarginAccountSummary', 'marginBuy', 'marginSell', 'getMarginPosition', 'closeMarginPosition', ] ## Instruction: Add DATE_FORMAT for parsing any datetime strings ## Code After: PUBLIC_API = 'https://poloniex.com/public' PRIVATE_API = 'https://poloniex.com/tradingApi' PUBLIC_COMMANDS = [ 'returnTicker', 'return24hVolume', 'returnOrderBook', 'returnTradeHistory', 'returnChartData', 'returnCurrencies', 'returnLoanOrders', ] PRIVATE_COMMANDS = [ 'returnBalances', 'returnCompleteBalances', 'returnDepositAddresses', 'generateNewAddress', 'returnDepositsWithdrawals', 'returnOpenOrders', 'returnTradeHistory', 'returnAvailableAccountBalances', 'returnTradableBalances', 'returnOpenLoanOffers', 'returnOrderTrades', 'returnActiveLoans', 'returnLendingHistory', 'createLoanOffer', 'cancelLoanOffer', 'toggleAutoRenew', 'buy', 'sell', 'cancelOrder', 'moveOrder', 'withdraw', 'returnFeeInfo', 'transferBalance', 'returnMarginAccountSummary', 'marginBuy', 'marginSell', 'getMarginPosition', 'closeMarginPosition', ] DATE_FORMAT='%Y-%m-%d %H:%M:%S'
PUBLIC_API = 'https://poloniex.com/public' PRIVATE_API = 'https://poloniex.com/tradingApi' PUBLIC_COMMANDS = [ 'returnTicker', 'return24hVolume', 'returnOrderBook', 'returnTradeHistory', 'returnChartData', 'returnCurrencies', 'returnLoanOrders', ] PRIVATE_COMMANDS = [ 'returnBalances', 'returnCompleteBalances', 'returnDepositAddresses', 'generateNewAddress', 'returnDepositsWithdrawals', 'returnOpenOrders', 'returnTradeHistory', 'returnAvailableAccountBalances', 'returnTradableBalances', 'returnOpenLoanOffers', 'returnOrderTrades', 'returnActiveLoans', 'returnLendingHistory', 'createLoanOffer', 'cancelLoanOffer', 'toggleAutoRenew', 'buy', 'sell', 'cancelOrder', 'moveOrder', 'withdraw', 'returnFeeInfo', 'transferBalance', 'returnMarginAccountSummary', 'marginBuy', 'marginSell', 'getMarginPosition', 'closeMarginPosition', ] + + DATE_FORMAT='%Y-%m-%d %H:%M:%S'
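The commit message above observes that Poloniex appears to emit timestamps in one fixed layout, which is exactly what makes a single module-level format constant useful. A short parsing sketch; the timestamp string itself is made up for illustration:

from datetime import datetime

DATE_FORMAT = '%Y-%m-%d %H:%M:%S'   # as defined in txpoloniex/const.py

parsed = datetime.strptime('2017-05-04 09:30:00', DATE_FORMAT)
print(parsed.isoformat())           # 2017-05-04T09:30:00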
a816d0655504051ea12718a0e34bc9645fc92730
personal-site/projects/views.py
personal-site/projects/views.py
from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['projects'] = Project.objects.order_by('priority') context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
Remove unnecessary projects added to context
Remove unnecessary projects added to context
Python
bsd-3-clause
brandonw/personal-site,brandonw/personal-site,brandonw/personal-site
from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) - context['projects'] = Project.objects.order_by('priority') context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
Remove unnecessary projects added to context
## Code Before: from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['projects'] = Project.objects.order_by('priority') context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context ## Instruction: Remove unnecessary projects added to context ## Code After: from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
from django.shortcuts import render from django.views.generic.detail import DetailView from projects.models import Project from misc.code_blocks_preprocessor import CodeBlockExtension import markdown class ProjectDetailView(DetailView): model = Project context_object_name='project' template_name = 'projects/detail.html' def get_context_data(self, **kwargs): context = super(ProjectDetailView, self).get_context_data(**kwargs) - context['projects'] = Project.objects.order_by('priority') context['html'] = markdown.markdown( context['object'].full_descr, extensions=[CodeBlockExtension()]) return context
43b00bdb18131c49a6e52d752aeb0549298d8cda
avena/tests/test-image.py
avena/tests/test-image.py
from numpy import all, array, dstack from .. import image def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) if __name__ == '__main__': pass
from numpy import all, allclose, array, dstack from os import remove from os.path import sep, split from .. import image, utils def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) def test_read_save(): f = split(__file__)[0] + sep + 'drink.png' x = image.read(f) tmp = utils.rand_filename(f) try: image.save(x, tmp) y = image.read(tmp) assert allclose(x, y, rtol=1e-4, atol=1e-1) finally: remove(tmp) if __name__ == '__main__': pass
Add more unit tests for the image module.
Add more unit tests for the image module.
Python
isc
eliteraspberries/avena
- from numpy import all, array, dstack + from numpy import all, allclose, array, dstack + from os import remove + from os.path import sep, split - from .. import image + from .. import image, utils def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) + def test_read_save(): + f = split(__file__)[0] + sep + 'drink.png' + x = image.read(f) + tmp = utils.rand_filename(f) + try: + image.save(x, tmp) + y = image.read(tmp) + assert allclose(x, y, rtol=1e-4, atol=1e-1) + finally: + remove(tmp) + + if __name__ == '__main__': pass
Add more unit tests for the image module.
## Code Before: from numpy import all, array, dstack from .. import image def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) if __name__ == '__main__': pass ## Instruction: Add more unit tests for the image module. ## Code After: from numpy import all, allclose, array, dstack from os import remove from os.path import sep, split from .. import image, utils def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) def test_read_save(): f = split(__file__)[0] + sep + 'drink.png' x = image.read(f) tmp = utils.rand_filename(f) try: image.save(x, tmp) y = image.read(tmp) assert allclose(x, y, rtol=1e-4, atol=1e-1) finally: remove(tmp) if __name__ == '__main__': pass
- from numpy import all, array, dstack + from numpy import all, allclose, array, dstack ? ++++++++++ + from os import remove + from os.path import sep, split - from .. import image + from .. import image, utils ? +++++++ def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) + def test_read_save(): + f = split(__file__)[0] + sep + 'drink.png' + x = image.read(f) + tmp = utils.rand_filename(f) + try: + image.save(x, tmp) + y = image.read(tmp) + assert allclose(x, y, rtol=1e-4, atol=1e-1) + finally: + remove(tmp) + + if __name__ == '__main__': pass
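On the tolerances in test_read_save above: exact equality would hold for a lossless round-trip, so rtol=1e-4 together with atol=1e-1 suggests the save/read path quantises pixel values, for example to 8-bit image channels. That reading is an assumption, not stated in the record. The sketch shows that for small per-pixel errors the absolute tolerance is what actually decides the comparison:

from numpy import allclose, array

a = array([0.500, 0.250, 0.125])
b = array([0.502, 0.251, 0.125])              # small perturbation, e.g. quantisation error
print(allclose(a, b, rtol=1e-4, atol=1e-1))   # True: 0.002 < atol + rtol*|b|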
0eabc95105fecfd4b960b1c135f589f0eea9de2a
flaskrst/modules/staticpages/__init__.py
flaskrst/modules/staticpages/__init__.py
import os from flask import current_app, render_template from flaskrst.parsers import rstDocument from flaskrst.modules import Blueprint static_pages = Blueprint('static_pages', __name__, \ template_folder='templates') @static_pages.route('/', defaults={'file_name': 'index'}) @static_pages.route('/<file_name>') def show(file_name): rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst') rst = rstDocument(rst_file) return render_template("static_page.html", page=rst) def setup(app, cfg): app.register_blueprint(static_pages)
import os from flask import current_app, render_template from flaskrst.parsers import rstDocument from flaskrst.modules import Blueprint static_pages = Blueprint('static_pages', __name__, \ template_folder='templates') @static_pages.route('/', defaults={'file_path': 'index'}) @static_pages.route('/<path:file_path>') def show(file_path): if file_path.endswith('/'): file_path += "index" rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst') rst = rstDocument(rst_file) return render_template("static_page.html", page=rst) def setup(app, cfg): app.register_blueprint(static_pages)
Support of static pages inside of a directory
Support of static pages inside of a directory
Python
bsd-3-clause
jarus/flask-rst
import os from flask import current_app, render_template from flaskrst.parsers import rstDocument from flaskrst.modules import Blueprint static_pages = Blueprint('static_pages', __name__, \ template_folder='templates') - @static_pages.route('/', defaults={'file_name': 'index'}) + @static_pages.route('/', defaults={'file_path': 'index'}) - @static_pages.route('/<file_name>') + @static_pages.route('/<path:file_path>') - def show(file_name): + def show(file_path): + if file_path.endswith('/'): + file_path += "index" - rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst') + rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst') rst = rstDocument(rst_file) return render_template("static_page.html", page=rst) def setup(app, cfg): app.register_blueprint(static_pages)
Support of static pages inside of a directory
## Code Before: import os from flask import current_app, render_template from flaskrst.parsers import rstDocument from flaskrst.modules import Blueprint static_pages = Blueprint('static_pages', __name__, \ template_folder='templates') @static_pages.route('/', defaults={'file_name': 'index'}) @static_pages.route('/<file_name>') def show(file_name): rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst') rst = rstDocument(rst_file) return render_template("static_page.html", page=rst) def setup(app, cfg): app.register_blueprint(static_pages) ## Instruction: Support of static pages inside of a directory ## Code After: import os from flask import current_app, render_template from flaskrst.parsers import rstDocument from flaskrst.modules import Blueprint static_pages = Blueprint('static_pages', __name__, \ template_folder='templates') @static_pages.route('/', defaults={'file_path': 'index'}) @static_pages.route('/<path:file_path>') def show(file_path): if file_path.endswith('/'): file_path += "index" rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst') rst = rstDocument(rst_file) return render_template("static_page.html", page=rst) def setup(app, cfg): app.register_blueprint(static_pages)
import os from flask import current_app, render_template from flaskrst.parsers import rstDocument from flaskrst.modules import Blueprint static_pages = Blueprint('static_pages', __name__, \ template_folder='templates') - @static_pages.route('/', defaults={'file_name': 'index'}) ? ^ ^^ + @static_pages.route('/', defaults={'file_path': 'index'}) ? ^ ^^ - @static_pages.route('/<file_name>') ? ^ ^^ + @static_pages.route('/<path:file_path>') ? +++++ ^ ^^ - def show(file_name): ? ^ ^^ + def show(file_path): ? ^ ^^ + if file_path.endswith('/'): + file_path += "index" - rst_file = os.path.join(current_app.config['SOURCE'], file_name + '.rst') ? ^ ^^ + rst_file = os.path.join(current_app.config['SOURCE'], file_path + '.rst') ? ^ ^^ rst = rstDocument(rst_file) return render_template("static_page.html", page=rst) def setup(app, cfg): app.register_blueprint(static_pages)
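The path converter and the endswith('/') branch in the record above jointly map every URL onto an .rst file under the configured SOURCE directory. A standalone sketch of that mapping on a POSIX layout; the source root shown is an illustrative value:

import os

def source_file(file_path, source='/srv/site'):   # '/srv/site' is illustrative
    # a trailing slash means the index page of that directory,
    # mirroring the view's endswith('/') check
    if file_path.endswith('/'):
        file_path += 'index'
    return os.path.join(source, file_path + '.rst')

print(source_file('index'))        # /srv/site/index.rst, the '/' route default
print(source_file('docs/'))        # /srv/site/docs/index.rst
print(source_file('docs/intro'))   # /srv/site/docs/intro.rst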
cebd4f1ee9a87cc2652ebf8981df20121ec257b2
steel/fields/numbers.py
steel/fields/numbers.py
import struct from steel.fields import Field __all__ = ['Integer'] class Integer(Field): "An integer represented as a sequence and bytes" # These map a number of bytes to a struct format code size_formats = { 1: 'B', # char 2: 'H', # short 4: 'L', # long 8: 'Q', # long long } def __init__(self, *args, endianness='<', **kwargs): super(Integer, self).__init__(*args, **kwargs) self.format_code = endianness + self.size_formats[self.size] def encode(self, value): return struct.pack(self.format_code, value) def decode(self, value): # The index on the end is because unpack always returns a tuple return struct.unpack(self.format_code, value)[0]
import struct from steel.fields import Field __all__ = ['Integer'] class Integer(Field): "An integer represented as a sequence and bytes" # These map a number of bytes to a struct format code size_formats = { 1: 'B', # char 2: 'H', # short 4: 'L', # long 8: 'Q', # long long } def __init__(self, *args, endianness='<', **kwargs): super(Integer, self).__init__(*args, **kwargs) self.format_code = endianness + self.size_formats[self.size] def encode(self, value): try: return struct.pack(self.format_code, value) except struct.error as e: raise ValueError(*e.args) def decode(self, value): # The index on the end is because unpack always returns a tuple try: return struct.unpack(self.format_code, value)[0] except struct.error as e: raise ValueError(*e.args)
Raise ValueError instead of struct.error
Raise ValueError instead of struct.error The protocol for field encoding and decoding is to raise ValueError, so this is a necessary translation.
Python
bsd-3-clause
gulopine/steel-experiment
import struct from steel.fields import Field __all__ = ['Integer'] class Integer(Field): "An integer represented as a sequence and bytes" # These map a number of bytes to a struct format code size_formats = { 1: 'B', # char 2: 'H', # short 4: 'L', # long 8: 'Q', # long long } def __init__(self, *args, endianness='<', **kwargs): super(Integer, self).__init__(*args, **kwargs) self.format_code = endianness + self.size_formats[self.size] def encode(self, value): + try: - return struct.pack(self.format_code, value) + return struct.pack(self.format_code, value) + except struct.error as e: + raise ValueError(*e.args) def decode(self, value): # The index on the end is because unpack always returns a tuple + try: - return struct.unpack(self.format_code, value)[0] + return struct.unpack(self.format_code, value)[0] + except struct.error as e: + raise ValueError(*e.args)
Raise ValueError instead of struct.error
## Code Before: import struct from steel.fields import Field __all__ = ['Integer'] class Integer(Field): "An integer represented as a sequence and bytes" # These map a number of bytes to a struct format code size_formats = { 1: 'B', # char 2: 'H', # short 4: 'L', # long 8: 'Q', # long long } def __init__(self, *args, endianness='<', **kwargs): super(Integer, self).__init__(*args, **kwargs) self.format_code = endianness + self.size_formats[self.size] def encode(self, value): return struct.pack(self.format_code, value) def decode(self, value): # The index on the end is because unpack always returns a tuple return struct.unpack(self.format_code, value)[0] ## Instruction: Raise ValueError instead of struct.error ## Code After: import struct from steel.fields import Field __all__ = ['Integer'] class Integer(Field): "An integer represented as a sequence and bytes" # These map a number of bytes to a struct format code size_formats = { 1: 'B', # char 2: 'H', # short 4: 'L', # long 8: 'Q', # long long } def __init__(self, *args, endianness='<', **kwargs): super(Integer, self).__init__(*args, **kwargs) self.format_code = endianness + self.size_formats[self.size] def encode(self, value): try: return struct.pack(self.format_code, value) except struct.error as e: raise ValueError(*e.args) def decode(self, value): # The index on the end is because unpack always returns a tuple try: return struct.unpack(self.format_code, value)[0] except struct.error as e: raise ValueError(*e.args)
import struct from steel.fields import Field __all__ = ['Integer'] class Integer(Field): "An integer represented as a sequence and bytes" # These map a number of bytes to a struct format code size_formats = { 1: 'B', # char 2: 'H', # short 4: 'L', # long 8: 'Q', # long long } def __init__(self, *args, endianness='<', **kwargs): super(Integer, self).__init__(*args, **kwargs) self.format_code = endianness + self.size_formats[self.size] def encode(self, value): + try: - return struct.pack(self.format_code, value) + return struct.pack(self.format_code, value) ? ++++ + except struct.error as e: + raise ValueError(*e.args) def decode(self, value): # The index on the end is because unpack always returns a tuple + try: - return struct.unpack(self.format_code, value)[0] + return struct.unpack(self.format_code, value)[0] ? ++++ + except struct.error as e: + raise ValueError(*e.args)
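What the new try/except in the record above actually intercepts can be shown with the one-byte format an Integer field of size 1 with the default endianness='<' would build; the value 300 is chosen only because it cannot fit in an unsigned char:

import struct

fmt = '<B'                               # little-endian unsigned char
try:
    struct.pack(fmt, 300)                # out of range for one byte
except struct.error as exc:
    translated = ValueError(*exc.args)   # the translation encode() now performs
    print(type(translated).__name__)     # ValueError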
3fbbdec51cfd93217705adcae37b1bf22d5661fa
backend/playlist/serializers.py
backend/playlist/serializers.py
from rest_framework import serializers from .models import Cd, Cdtrack, Show, Playlist, PlaylistEntry class TrackSerializer(serializers.ModelSerializer): album = serializers.StringRelatedField( read_only=True ) class Meta: model = Cdtrack fields = ('trackid', 'url', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'album') class ReleaseSerializer(serializers.HyperlinkedModelSerializer): tracks = serializers.PrimaryKeyRelatedField( many=True, read_only=True ) class Meta: model = Cd fields = ('id', 'url', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks') class ShowSerializer(serializers.ModelSerializer): class Meta: model = Show fields = ('id', 'name', 'startTime', 'endTime', 'defaultHost') class PlaylistEntrySerializer(serializers.ModelSerializer): playlist = serializers.PrimaryKeyRelatedField( queryset = Playlist.objects.all() ) class Meta: model = PlaylistEntry fields = ('id', 'artist','album','title','duration','local','australian','female','newRelease','playlist' ) pass class PlaylistSerializer(serializers.ModelSerializer): entries =PlaylistEntrySerializer( many=True, read_only=True ) class Meta: model = Playlist fields = ('id', 'show', 'host', 'date', 'notes', 'entries')
from rest_framework import serializers from .models import Cd, Cdtrack, Show, Playlist, PlaylistEntry class TrackSerializer(serializers.ModelSerializer): album = serializers.StringRelatedField( read_only=True ) class Meta: model = Cdtrack fields = ('trackid', 'url', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'album') class ReleaseSerializer(serializers.HyperlinkedModelSerializer): tracks = serializers.PrimaryKeyRelatedField( many=True, read_only=True ) class Meta: model = Cd fields = ('id', 'url', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks') class ShowSerializer(serializers.ModelSerializer): class Meta: model = Show fields = ('id', 'name', 'startTime', 'endTime', 'defaultHost') class PlaylistEntrySerializer(serializers.ModelSerializer): playlist = serializers.PrimaryKeyRelatedField( queryset = Playlist.objects.all() ) class Meta: model = PlaylistEntry fields = ('id', 'artist','album','title','duration','local','australian','female','newRelease','playlist' ) pass class PlaylistSerializer(serializers.ModelSerializer): entries =PlaylistEntrySerializer( many=True, read_only=True ) class Meta: model = Playlist fields = ('id', 'show','showname', 'host', 'date', 'notes', 'entries')
Add showname to playlist API view.
Add showname to playlist API view. * Even though it's obsolete now, we need it for old shows.
Python
mit
ThreeDRadio/playlists,ThreeDRadio/playlists,ThreeDRadio/playlists
from rest_framework import serializers from .models import Cd, Cdtrack, Show, Playlist, PlaylistEntry class TrackSerializer(serializers.ModelSerializer): album = serializers.StringRelatedField( read_only=True ) class Meta: model = Cdtrack fields = ('trackid', 'url', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'album') class ReleaseSerializer(serializers.HyperlinkedModelSerializer): tracks = serializers.PrimaryKeyRelatedField( many=True, read_only=True ) class Meta: model = Cd fields = ('id', 'url', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks') class ShowSerializer(serializers.ModelSerializer): class Meta: model = Show fields = ('id', 'name', 'startTime', 'endTime', 'defaultHost') class PlaylistEntrySerializer(serializers.ModelSerializer): playlist = serializers.PrimaryKeyRelatedField( queryset = Playlist.objects.all() ) class Meta: model = PlaylistEntry fields = ('id', 'artist','album','title','duration','local','australian','female','newRelease','playlist' ) pass class PlaylistSerializer(serializers.ModelSerializer): entries =PlaylistEntrySerializer( many=True, read_only=True ) class Meta: model = Playlist - fields = ('id', 'show', 'host', 'date', 'notes', 'entries') + fields = ('id', 'show','showname', 'host', 'date', 'notes', 'entries')
Add showname to playlist API view.
## Code Before: from rest_framework import serializers from .models import Cd, Cdtrack, Show, Playlist, PlaylistEntry class TrackSerializer(serializers.ModelSerializer): album = serializers.StringRelatedField( read_only=True ) class Meta: model = Cdtrack fields = ('trackid', 'url', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'album') class ReleaseSerializer(serializers.HyperlinkedModelSerializer): tracks = serializers.PrimaryKeyRelatedField( many=True, read_only=True ) class Meta: model = Cd fields = ('id', 'url', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks') class ShowSerializer(serializers.ModelSerializer): class Meta: model = Show fields = ('id', 'name', 'startTime', 'endTime', 'defaultHost') class PlaylistEntrySerializer(serializers.ModelSerializer): playlist = serializers.PrimaryKeyRelatedField( queryset = Playlist.objects.all() ) class Meta: model = PlaylistEntry fields = ('id', 'artist','album','title','duration','local','australian','female','newRelease','playlist' ) pass class PlaylistSerializer(serializers.ModelSerializer): entries =PlaylistEntrySerializer( many=True, read_only=True ) class Meta: model = Playlist fields = ('id', 'show', 'host', 'date', 'notes', 'entries') ## Instruction: Add showname to playlist API view. ## Code After: from rest_framework import serializers from .models import Cd, Cdtrack, Show, Playlist, PlaylistEntry class TrackSerializer(serializers.ModelSerializer): album = serializers.StringRelatedField( read_only=True ) class Meta: model = Cdtrack fields = ('trackid', 'url', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'album') class ReleaseSerializer(serializers.HyperlinkedModelSerializer): tracks = serializers.PrimaryKeyRelatedField( many=True, read_only=True ) class Meta: model = Cd fields = ('id', 'url', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks') class ShowSerializer(serializers.ModelSerializer): class Meta: model = Show fields = ('id', 'name', 'startTime', 'endTime', 'defaultHost') class PlaylistEntrySerializer(serializers.ModelSerializer): playlist = serializers.PrimaryKeyRelatedField( queryset = Playlist.objects.all() ) class Meta: model = PlaylistEntry fields = ('id', 'artist','album','title','duration','local','australian','female','newRelease','playlist' ) pass class PlaylistSerializer(serializers.ModelSerializer): entries =PlaylistEntrySerializer( many=True, read_only=True ) class Meta: model = Playlist fields = ('id', 'show','showname', 'host', 'date', 'notes', 'entries')
from rest_framework import serializers from .models import Cd, Cdtrack, Show, Playlist, PlaylistEntry class TrackSerializer(serializers.ModelSerializer): album = serializers.StringRelatedField( read_only=True ) class Meta: model = Cdtrack fields = ('trackid', 'url', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'album') class ReleaseSerializer(serializers.HyperlinkedModelSerializer): tracks = serializers.PrimaryKeyRelatedField( many=True, read_only=True ) class Meta: model = Cd fields = ('id', 'url', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks') class ShowSerializer(serializers.ModelSerializer): class Meta: model = Show fields = ('id', 'name', 'startTime', 'endTime', 'defaultHost') class PlaylistEntrySerializer(serializers.ModelSerializer): playlist = serializers.PrimaryKeyRelatedField( queryset = Playlist.objects.all() ) class Meta: model = PlaylistEntry fields = ('id', 'artist','album','title','duration','local','australian','female','newRelease','playlist' ) pass class PlaylistSerializer(serializers.ModelSerializer): entries =PlaylistEntrySerializer( many=True, read_only=True ) class Meta: model = Playlist - fields = ('id', 'show', 'host', 'date', 'notes', 'entries') + fields = ('id', 'show','showname', 'host', 'date', 'notes', 'entries') ? +++++++++++
bab4f346cef626f29c67cc214b03db2475ef6b64
scriptcore/process/popen.py
scriptcore/process/popen.py
from subprocess import Popen as BasePopen class Popen(BasePopen): def communicate(self, input=None, timeout=None): """ Communicate :param input: Optional input :param timeout: Optional timeout :return: Out, err, exitcode """ out, err = super(Popen, self).communicate(input=input, timeout=timeout) out = out.strip().split('\n') err = err.strip().split('\n') return out, err, self.returncode def is_running(self): """ Running :return: Boolean """ return True if self.poll() is None else False
from subprocess import Popen as BasePopen class Popen(BasePopen): def communicate(self, input=None): """ Communicate :param input: Optional input :return: Out, err, exitcode """ out, err = super(Popen, self).communicate(input=input) out = out.strip().split('\n') err = err.strip().split('\n') return out, err, self.returncode def is_running(self): """ Running :return: Boolean """ return True if self.poll() is None else False
Fix error in communicate function.
Fix error in communicate function.
Python
apache-2.0
LowieHuyghe/script-core
from subprocess import Popen as BasePopen class Popen(BasePopen): - def communicate(self, input=None, timeout=None): + def communicate(self, input=None): """ Communicate :param input: Optional input - :param timeout: Optional timeout :return: Out, err, exitcode """ - out, err = super(Popen, self).communicate(input=input, timeout=timeout) + out, err = super(Popen, self).communicate(input=input) out = out.strip().split('\n') err = err.strip().split('\n') return out, err, self.returncode def is_running(self): """ Running :return: Boolean """ return True if self.poll() is None else False
Fix error in communicate function.
## Code Before: from subprocess import Popen as BasePopen class Popen(BasePopen): def communicate(self, input=None, timeout=None): """ Communicate :param input: Optional input :param timeout: Optional timeout :return: Out, err, exitcode """ out, err = super(Popen, self).communicate(input=input, timeout=timeout) out = out.strip().split('\n') err = err.strip().split('\n') return out, err, self.returncode def is_running(self): """ Running :return: Boolean """ return True if self.poll() is None else False ## Instruction: Fix error in communicate function. ## Code After: from subprocess import Popen as BasePopen class Popen(BasePopen): def communicate(self, input=None): """ Communicate :param input: Optional input :return: Out, err, exitcode """ out, err = super(Popen, self).communicate(input=input) out = out.strip().split('\n') err = err.strip().split('\n') return out, err, self.returncode def is_running(self): """ Running :return: Boolean """ return True if self.poll() is None else False
from subprocess import Popen as BasePopen class Popen(BasePopen): - def communicate(self, input=None, timeout=None): ? -------------- + def communicate(self, input=None): """ Communicate :param input: Optional input - :param timeout: Optional timeout :return: Out, err, exitcode """ - out, err = super(Popen, self).communicate(input=input, timeout=timeout) ? ----------------- + out, err = super(Popen, self).communicate(input=input) out = out.strip().split('\n') err = err.strip().split('\n') return out, err, self.returncode def is_running(self): """ Running :return: Boolean """ return True if self.poll() is None else False
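Two observations on the record above. Dropping the timeout parameter is most plausibly a Python 2 compatibility fix, since Popen.communicate only gained a timeout argument in Python 3.3 and passing it on older interpreters raises TypeError; the commit message does not say this, so treat it as a likely explanation rather than fact. Also, because communicate() strips and str-splits both streams, the wrapper presumably expects text-mode pipes. A usage sketch under those assumptions, relying on a POSIX echo binary:

from subprocess import PIPE

from scriptcore.process.popen import Popen   # module path taken from the record

proc = Popen(['echo', 'hello'], stdout=PIPE, stderr=PIPE,
             universal_newlines=True)        # text mode so .strip()/.split('\n') work
out, err, code = proc.communicate()
print(out, code)                             # ['hello'] 0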
1ba4d84fb72a343cdf288d905d2029f1d2fbee12
wagtail/api/v2/pagination.py
wagtail/api/v2/pagination.py
from collections import OrderedDict from django.conf import settings from rest_framework.pagination import BasePagination from rest_framework.response import Response from .utils import BadRequestError class WagtailPagination(BasePagination): def paginate_queryset(self, queryset, request, view=None): limit_max = getattr(settings, 'WAGTAILAPI_LIMIT_MAX', 20) try: offset = int(request.GET.get('offset', 0)) assert offset >= 0 except (ValueError, AssertionError): raise BadRequestError("offset must be a positive integer") try: limit_default = 20 if not limit_max else min(20, limit_max) limit = int(request.GET.get('limit', limit_default)) if limit_max and limit > limit_max: raise BadRequestError("limit cannot be higher than %d" % limit_max) assert limit >= 0 except (ValueError, AssertionError): raise BadRequestError("limit must be a positive integer") start = offset stop = offset + limit self.view = view self.total_count = queryset.count() return queryset[start:stop] def get_paginated_response(self, data): data = OrderedDict([ ('meta', OrderedDict([ ('total_count', self.total_count), ])), ('items', data), ]) return Response(data)
from collections import OrderedDict from django.conf import settings from rest_framework.pagination import BasePagination from rest_framework.response import Response from .utils import BadRequestError class WagtailPagination(BasePagination): def paginate_queryset(self, queryset, request, view=None): limit_max = getattr(settings, 'WAGTAILAPI_LIMIT_MAX', 20) try: offset = int(request.GET.get('offset', 0)) if offset < 0: raise ValueError() except ValueError: raise BadRequestError("offset must be a positive integer") try: limit_default = 20 if not limit_max else min(20, limit_max) limit = int(request.GET.get('limit', limit_default)) if limit < 0: raise ValueError() except ValueError: raise BadRequestError("limit must be a positive integer") if limit_max and limit > limit_max: raise BadRequestError( "limit cannot be higher than %d" % limit_max) start = offset stop = offset + limit self.view = view self.total_count = queryset.count() return queryset[start:stop] def get_paginated_response(self, data): data = OrderedDict([ ('meta', OrderedDict([ ('total_count', self.total_count), ])), ('items', data), ]) return Response(data)
Remove assert from WagtailPagination.paginate_queryset method
Remove assert from WagtailPagination.paginate_queryset method
Python
bsd-3-clause
mikedingjan/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,jnns/wagtail,mixxorz/wagtail,wagtail/wagtail,mixxorz/wagtail,FlipperPA/wagtail,wagtail/wagtail,gasman/wagtail,zerolab/wagtail,torchbox/wagtail,mikedingjan/wagtail,timorieber/wagtail,zerolab/wagtail,gasman/wagtail,jnns/wagtail,zerolab/wagtail,kaedroho/wagtail,thenewguy/wagtail,FlipperPA/wagtail,nimasmi/wagtail,thenewguy/wagtail,mixxorz/wagtail,torchbox/wagtail,mikedingjan/wagtail,mixxorz/wagtail,rsalmaso/wagtail,kaedroho/wagtail,nealtodd/wagtail,takeflight/wagtail,mixxorz/wagtail,nimasmi/wagtail,torchbox/wagtail,kaedroho/wagtail,jnns/wagtail,mikedingjan/wagtail,timorieber/wagtail,nealtodd/wagtail,wagtail/wagtail,nealtodd/wagtail,thenewguy/wagtail,thenewguy/wagtail,thenewguy/wagtail,FlipperPA/wagtail,takeflight/wagtail,gasman/wagtail,zerolab/wagtail,torchbox/wagtail,takeflight/wagtail,nimasmi/wagtail,FlipperPA/wagtail,wagtail/wagtail,zerolab/wagtail,jnns/wagtail,rsalmaso/wagtail,nimasmi/wagtail,timorieber/wagtail,takeflight/wagtail,gasman/wagtail,kaedroho/wagtail,rsalmaso/wagtail,gasman/wagtail,timorieber/wagtail,wagtail/wagtail,kaedroho/wagtail,nealtodd/wagtail
from collections import OrderedDict from django.conf import settings from rest_framework.pagination import BasePagination from rest_framework.response import Response from .utils import BadRequestError class WagtailPagination(BasePagination): def paginate_queryset(self, queryset, request, view=None): limit_max = getattr(settings, 'WAGTAILAPI_LIMIT_MAX', 20) try: offset = int(request.GET.get('offset', 0)) - assert offset >= 0 + if offset < 0: - except (ValueError, AssertionError): + raise ValueError() + except ValueError: raise BadRequestError("offset must be a positive integer") try: limit_default = 20 if not limit_max else min(20, limit_max) limit = int(request.GET.get('limit', limit_default)) + if limit < 0: + raise ValueError() + except ValueError: + raise BadRequestError("limit must be a positive integer") - if limit_max and limit > limit_max: + if limit_max and limit > limit_max: + raise BadRequestError( - raise BadRequestError("limit cannot be higher than %d" % limit_max) + "limit cannot be higher than %d" % limit_max) - - assert limit >= 0 - except (ValueError, AssertionError): - raise BadRequestError("limit must be a positive integer") start = offset stop = offset + limit self.view = view self.total_count = queryset.count() return queryset[start:stop] def get_paginated_response(self, data): data = OrderedDict([ ('meta', OrderedDict([ ('total_count', self.total_count), ])), ('items', data), ]) return Response(data)
Remove assert from WagtailPagination.paginate_queryset method
## Code Before: from collections import OrderedDict from django.conf import settings from rest_framework.pagination import BasePagination from rest_framework.response import Response from .utils import BadRequestError class WagtailPagination(BasePagination): def paginate_queryset(self, queryset, request, view=None): limit_max = getattr(settings, 'WAGTAILAPI_LIMIT_MAX', 20) try: offset = int(request.GET.get('offset', 0)) assert offset >= 0 except (ValueError, AssertionError): raise BadRequestError("offset must be a positive integer") try: limit_default = 20 if not limit_max else min(20, limit_max) limit = int(request.GET.get('limit', limit_default)) if limit_max and limit > limit_max: raise BadRequestError("limit cannot be higher than %d" % limit_max) assert limit >= 0 except (ValueError, AssertionError): raise BadRequestError("limit must be a positive integer") start = offset stop = offset + limit self.view = view self.total_count = queryset.count() return queryset[start:stop] def get_paginated_response(self, data): data = OrderedDict([ ('meta', OrderedDict([ ('total_count', self.total_count), ])), ('items', data), ]) return Response(data) ## Instruction: Remove assert from WagtailPagination.paginate_queryset method ## Code After: from collections import OrderedDict from django.conf import settings from rest_framework.pagination import BasePagination from rest_framework.response import Response from .utils import BadRequestError class WagtailPagination(BasePagination): def paginate_queryset(self, queryset, request, view=None): limit_max = getattr(settings, 'WAGTAILAPI_LIMIT_MAX', 20) try: offset = int(request.GET.get('offset', 0)) if offset < 0: raise ValueError() except ValueError: raise BadRequestError("offset must be a positive integer") try: limit_default = 20 if not limit_max else min(20, limit_max) limit = int(request.GET.get('limit', limit_default)) if limit < 0: raise ValueError() except ValueError: raise BadRequestError("limit must be a positive integer") if limit_max and limit > limit_max: raise BadRequestError( "limit cannot be higher than %d" % limit_max) start = offset stop = offset + limit self.view = view self.total_count = queryset.count() return queryset[start:stop] def get_paginated_response(self, data): data = OrderedDict([ ('meta', OrderedDict([ ('total_count', self.total_count), ])), ('items', data), ]) return Response(data)
from collections import OrderedDict from django.conf import settings from rest_framework.pagination import BasePagination from rest_framework.response import Response from .utils import BadRequestError class WagtailPagination(BasePagination): def paginate_queryset(self, queryset, request, view=None): limit_max = getattr(settings, 'WAGTAILAPI_LIMIT_MAX', 20) try: offset = int(request.GET.get('offset', 0)) - assert offset >= 0 ? ^^^^^^ ^^ + if offset < 0: ? ^^ ^ + - except (ValueError, AssertionError): + raise ValueError() + except ValueError: raise BadRequestError("offset must be a positive integer") try: limit_default = 20 if not limit_max else min(20, limit_max) limit = int(request.GET.get('limit', limit_default)) + if limit < 0: + raise ValueError() + except ValueError: + raise BadRequestError("limit must be a positive integer") - if limit_max and limit > limit_max: ? ---- + if limit_max and limit > limit_max: + raise BadRequestError( - raise BadRequestError("limit cannot be higher than %d" % limit_max) ? ---------------------- + "limit cannot be higher than %d" % limit_max) - - assert limit >= 0 - except (ValueError, AssertionError): - raise BadRequestError("limit must be a positive integer") start = offset stop = offset + limit self.view = view self.total_count = queryset.count() return queryset[start:stop] def get_paginated_response(self, data): data = OrderedDict([ ('meta', OrderedDict([ ('total_count', self.total_count), ])), ('items', data), ]) return Response(data)
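A note on why assert is a poor fit here (the standard rationale, not stated in the commit): assert statements are stripped when Python runs with -O, which would silently disable the offset/limit validation. The explicit-exception shape the new code uses can be exercised on its own:

def parse_non_negative_int(raw, name):
    try:
        value = int(raw)
        if value < 0:
            raise ValueError()
    except ValueError:
        raise ValueError("%s must be a positive integer" % name)
    return value


print(parse_non_negative_int("20", "limit"))  # 20
try:
    parse_non_negative_int("-1", "offset")
except ValueError as exc:
    print(exc)  # offset must be a positive integer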
f8677eff328d50e16b51c2802b3f9e168c38534b
user_test.py
user_test.py
try: import sympy except ImportError: print("sympy is required") else: if sympy.__version__ < '0.7.5': print("SymPy version 0.7.5 or newer is required. You have", sympy.__version__) if sympy.__version__ != '0.7.5': print("The stable SymPy version 0.7.5 is recommended. You have", sympy.__version__) try: import matplotlib except ImportError: print("matplotlib is required for the plotting section of the tutorial") try: import IPython except ImportError: print("IPython notebook is required.") else: if IPython.__version__ < '2.1.0': print("The latest version of IPython is recommended. You have", IPython.__version__) print("""A fortran and/or C compiler is required for the code generation portion of the tutorial. However, if you do not have one, you should not worry, as it will not be a large part of the tutorial.""")
try: import sympy except ImportError: print("sympy is required") else: if sympy.__version__ < '1.0': print("SymPy version 1.0 or newer is required. You have", sympy.__version__) if sympy.__version__ != '1.0': print("The stable SymPy version 1.0 is recommended. You have", sympy.__version__) try: import matplotlib except ImportError: print("matplotlib is required for the plotting section of the tutorial") try: import IPython except ImportError: print("IPython notebook is required.") else: if IPython.__version__ < '4.1.2': print("The latest version of IPython is recommended. You have", IPython.__version__) print("""A fortran and/or C compiler is required for the code generation portion of the tutorial. However, if you do not have one, you should not worry, as it will not be a large part of the tutorial.""")
Update SymPy/IPython version in test script
Update SymPy/IPython version in test script
Python
bsd-3-clause
leosartaj/scipy-2016-tutorial,aktech/scipy-2016-tutorial
try: import sympy except ImportError: print("sympy is required") else: - if sympy.__version__ < '0.7.5': + if sympy.__version__ < '1.0': - print("SymPy version 0.7.5 or newer is required. You have", sympy.__version__) + print("SymPy version 1.0 or newer is required. You have", sympy.__version__) - if sympy.__version__ != '0.7.5': + if sympy.__version__ != '1.0': - print("The stable SymPy version 0.7.5 is recommended. You have", sympy.__version__) + print("The stable SymPy version 1.0 is recommended. You have", sympy.__version__) try: import matplotlib except ImportError: print("matplotlib is required for the plotting section of the tutorial") try: import IPython except ImportError: print("IPython notebook is required.") else: - if IPython.__version__ < '2.1.0': + if IPython.__version__ < '4.1.2': print("The latest version of IPython is recommended. You have", IPython.__version__) print("""A fortran and/or C compiler is required for the code generation portion of the tutorial. However, if you do not have one, you should not worry, as it will not be a large part of the tutorial.""")
Update SymPy/IPython version in test script
## Code Before: try: import sympy except ImportError: print("sympy is required") else: if sympy.__version__ < '0.7.5': print("SymPy version 0.7.5 or newer is required. You have", sympy.__version__) if sympy.__version__ != '0.7.5': print("The stable SymPy version 0.7.5 is recommended. You have", sympy.__version__) try: import matplotlib except ImportError: print("matplotlib is required for the plotting section of the tutorial") try: import IPython except ImportError: print("IPython notebook is required.") else: if IPython.__version__ < '2.1.0': print("The latest version of IPython is recommended. You have", IPython.__version__) print("""A fortran and/or C compiler is required for the code generation portion of the tutorial. However, if you do not have one, you should not worry, as it will not be a large part of the tutorial.""") ## Instruction: Update SymPy/IPython version in test script ## Code After: try: import sympy except ImportError: print("sympy is required") else: if sympy.__version__ < '1.0': print("SymPy version 1.0 or newer is required. You have", sympy.__version__) if sympy.__version__ != '1.0': print("The stable SymPy version 1.0 is recommended. You have", sympy.__version__) try: import matplotlib except ImportError: print("matplotlib is required for the plotting section of the tutorial") try: import IPython except ImportError: print("IPython notebook is required.") else: if IPython.__version__ < '4.1.2': print("The latest version of IPython is recommended. You have", IPython.__version__) print("""A fortran and/or C compiler is required for the code generation portion of the tutorial. However, if you do not have one, you should not worry, as it will not be a large part of the tutorial.""")
try: import sympy except ImportError: print("sympy is required") else: - if sympy.__version__ < '0.7.5': ? ---- + if sympy.__version__ < '1.0': ? ++ - print("SymPy version 0.7.5 or newer is required. You have", sympy.__version__) ? ---- + print("SymPy version 1.0 or newer is required. You have", sympy.__version__) ? ++ - if sympy.__version__ != '0.7.5': ? ---- + if sympy.__version__ != '1.0': ? ++ - print("The stable SymPy version 0.7.5 is recommended. You have", sympy.__version__) ? ---- + print("The stable SymPy version 1.0 is recommended. You have", sympy.__version__) ? ++ try: import matplotlib except ImportError: print("matplotlib is required for the plotting section of the tutorial") try: import IPython except ImportError: print("IPython notebook is required.") else: - if IPython.__version__ < '2.1.0': ? ^ ^ + if IPython.__version__ < '4.1.2': ? ^ ^ print("The latest version of IPython is recommended. You have", IPython.__version__) print("""A fortran and/or C compiler is required for the code generation portion of the tutorial. However, if you do not have one, you should not worry, as it will not be a large part of the tutorial.""")
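One caveat about the version checks above, worth flagging for anyone adapting the script: comparing __version__ strings with < is lexicographic, so it goes wrong once a component reaches two digits. A safer comparison using the standard library of that era (distutils.version ships with the Pythons this script targets, though it is deprecated in modern ones):

from distutils.version import LooseVersion

print('10.0' < '2.0')                                  # True  -- string compare is wrong
print(LooseVersion('10.0') < LooseVersion('2.0'))      # False -- numeric compare is right
print(LooseVersion('0.7.6') >= LooseVersion('0.7.5'))  # True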
7c591a38bc89350ea2586fb83a6880cdf71b4a9a
passwd_change.py
passwd_change.py
import sys _args = sys.argv if __name__ == "__main__": if len(_args) == 4: keys_file = _args[1] target_file = _args[2] result_file = _args[3] with open(keys_file, 'r') as k: keys = k.readlines() keys = [key.strip() for key in keys] keys = [key for key in keys if key != ''] with open(target_file, 'r') as t: target_lines = t.readlines() with open(result_file, 'w') as r: for line in target_lines: if line.split(':')[0] in keys: r.write(line) else: print('./passwd_change.py keys_file.txt passwd_file result_file')
import sys _args = sys.argv if __name__ == "__main__": if len(_args) == 4: keys_file = _args[1] target_file = _args[2] result_file = _args[3] try: with open(keys_file, 'r') as k: keys = k.readlines() keys = [key.strip().split('@')[0] for key in keys] keys = [key for key in keys if key != ''] with open(target_file, 'r') as t: target_lines = t.readlines() with open(result_file, 'w') as r: for line in target_lines: if line.split(':')[0] in keys or line.split(':')[3] != '12': r.write(line) except Exception as e: print(str(e)) sys.exit() else: print('./passwd_change.py keys_file.txt passwd_file result_file')
Add exception handling around all with blocks.
Add exception handling around all with blocks.
Python
mit
maxsocl/oldmailer
import sys - _args = sys.argv if __name__ == "__main__": if len(_args) == 4: keys_file = _args[1] target_file = _args[2] result_file = _args[3] + try: - with open(keys_file, 'r') as k: + with open(keys_file, 'r') as k: - keys = k.readlines() + keys = k.readlines() - keys = [key.strip() for key in keys] + keys = [key.strip().split('@')[0] for key in keys] - keys = [key for key in keys if key != ''] + keys = [key for key in keys if key != ''] - with open(target_file, 'r') as t: + with open(target_file, 'r') as t: - target_lines = t.readlines() + target_lines = t.readlines() - with open(result_file, 'w') as r: + with open(result_file, 'w') as r: - for line in target_lines: + for line in target_lines: - if line.split(':')[0] in keys: + if line.split(':')[0] in keys or line.split(':')[3] != '12': - r.write(line) + r.write(line) + + except Exception as e: + print(str(e)) + sys.exit() else: print('./passwd_change.py keys_file.txt passwd_file result_file') -
Add exception handling around all with blocks.
## Code Before: import sys


_args = sys.argv

if __name__ == "__main__":
    if len(_args) == 4:
        keys_file = _args[1]
        target_file = _args[2]
        result_file = _args[3]

        with open(keys_file, 'r') as k:
            keys = k.readlines()
            keys = [key.strip() for key in keys]
            keys = [key for key in keys if key != '']

        with open(target_file, 'r') as t:
            target_lines = t.readlines()

        with open(result_file, 'w') as r:
            for line in target_lines:
                if line.split(':')[0] in keys:
                    r.write(line)
    else:
        print('./passwd_change.py keys_file.txt passwd_file result_file')

## Instruction: Add exception handling around all with blocks.
## Code After: import sys


_args = sys.argv

if __name__ == "__main__":
    if len(_args) == 4:
        keys_file = _args[1]
        target_file = _args[2]
        result_file = _args[3]

        try:
            with open(keys_file, 'r') as k:
                keys = k.readlines()
                keys = [key.strip().split('@')[0] for key in keys]
                keys = [key for key in keys if key != '']

            with open(target_file, 'r') as t:
                target_lines = t.readlines()

            with open(result_file, 'w') as r:
                for line in target_lines:
                    if line.split(':')[0] in keys or line.split(':')[3] != '12':
                        r.write(line)

        except Exception as e:
            print(str(e))
            sys.exit()
    else:
        print('./passwd_change.py keys_file.txt passwd_file result_file')
import sys - _args = sys.argv if __name__ == "__main__": if len(_args) == 4: keys_file = _args[1] target_file = _args[2] result_file = _args[3] + try: - with open(keys_file, 'r') as k: + with open(keys_file, 'r') as k: ? ++++ - keys = k.readlines() + keys = k.readlines() ? ++++ - keys = [key.strip() for key in keys] + keys = [key.strip().split('@')[0] for key in keys] ? ++++ ++++++++++++++ - keys = [key for key in keys if key != ''] + keys = [key for key in keys if key != ''] ? ++++ - with open(target_file, 'r') as t: + with open(target_file, 'r') as t: ? ++++ - target_lines = t.readlines() + target_lines = t.readlines() ? ++++ - with open(result_file, 'w') as r: + with open(result_file, 'w') as r: ? ++++ - for line in target_lines: + for line in target_lines: ? ++++ - if line.split(':')[0] in keys: + if line.split(':')[0] in keys or line.split(':')[3] != '12': - r.write(line) + r.write(line) ? ++++ + + except Exception as e: + print(str(e)) + sys.exit() else: print('./passwd_change.py keys_file.txt passwd_file result_file') -
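Two details of the patched filter deserve unpacking. key.strip().split('@')[0] reduces an e-mail-style key to its local part so it can match a passwd login name, and line.split(':')[3] is the GID column of a passwd entry; the significance of GID '12' is specific to this script and is not stated in the commit. Illustration with made-up data:

keys = ['[email protected]\n', 'bob\n', '\n']
keys = [key.strip().split('@')[0] for key in keys]
keys = [key for key in keys if key != '']
print(keys)  # ['alice', 'bob']

line = 'carol:x:1003:12:Carol:/home/carol:/bin/bash\n'
print(line.split(':')[0])  # carol -- login name
print(line.split(':')[3])  # 12    -- GID field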
0e66044b3949255be2653c0cffee53b003ea3929
solum/api/controllers/v1/pub/trigger.py
solum/api/controllers/v1/pub/trigger.py
import pecan from pecan import rest from solum.api.handlers import assembly_handler from solum.common import exception class TriggerController(rest.RestController): """Manages triggers.""" @pecan.expose() def post(self, trigger_id): """Trigger a new event on Solum.""" handler = assembly_handler.AssemblyHandler(None) try: handler.trigger_workflow(trigger_id) except exception.ResourceNotFound as excp: pecan.response.status = excp.code pecan.response.text = excp.message return pecan.response.status = 202
import pecan from pecan import rest from solum.api.handlers import assembly_handler from solum.common import exception class TriggerController(rest.RestController): """Manages triggers.""" @pecan.expose() def post(self, trigger_id): """Trigger a new event on Solum.""" handler = assembly_handler.AssemblyHandler(None) try: handler.trigger_workflow(trigger_id) pecan.response.status = 202 except exception.ResourceNotFound as excp: pecan.response.status = excp.code pecan.response.body = excp.message
Fix pecan error message not available in body for Trigger.post
Fix pecan error message not available in body for Trigger.post

When a trigger_id resource is not found, the API returned a 202 status code and the error message could not be added to the body of the response, because pecan.response.text was used instead of pecan.response.body.

Change-Id: I8b03210b5a2f2b5c0ea24bfc8149cca122dffeea
Closes-Bug: #1324940
Python
apache-2.0
ed-/solum,stackforge/solum,ed-/solum,devdattakulkarni/test-solum,gilbertpilz/solum,ed-/solum,openstack/solum,gilbertpilz/solum,ed-/solum,devdattakulkarni/test-solum,openstack/solum,gilbertpilz/solum,stackforge/solum,gilbertpilz/solum
import pecan from pecan import rest from solum.api.handlers import assembly_handler from solum.common import exception class TriggerController(rest.RestController): """Manages triggers.""" @pecan.expose() def post(self, trigger_id): """Trigger a new event on Solum.""" handler = assembly_handler.AssemblyHandler(None) try: handler.trigger_workflow(trigger_id) + pecan.response.status = 202 except exception.ResourceNotFound as excp: pecan.response.status = excp.code - pecan.response.text = excp.message + pecan.response.body = excp.message - return - pecan.response.status = 202
Fix pecan error message not available in body for Trigger.post
## Code Before: import pecan from pecan import rest from solum.api.handlers import assembly_handler from solum.common import exception class TriggerController(rest.RestController): """Manages triggers.""" @pecan.expose() def post(self, trigger_id): """Trigger a new event on Solum.""" handler = assembly_handler.AssemblyHandler(None) try: handler.trigger_workflow(trigger_id) except exception.ResourceNotFound as excp: pecan.response.status = excp.code pecan.response.text = excp.message return pecan.response.status = 202 ## Instruction: Fix pecan error message not available in body for Trigger.post ## Code After: import pecan from pecan import rest from solum.api.handlers import assembly_handler from solum.common import exception class TriggerController(rest.RestController): """Manages triggers.""" @pecan.expose() def post(self, trigger_id): """Trigger a new event on Solum.""" handler = assembly_handler.AssemblyHandler(None) try: handler.trigger_workflow(trigger_id) pecan.response.status = 202 except exception.ResourceNotFound as excp: pecan.response.status = excp.code pecan.response.body = excp.message
import pecan from pecan import rest from solum.api.handlers import assembly_handler from solum.common import exception class TriggerController(rest.RestController): """Manages triggers.""" @pecan.expose() def post(self, trigger_id): """Trigger a new event on Solum.""" handler = assembly_handler.AssemblyHandler(None) try: handler.trigger_workflow(trigger_id) + pecan.response.status = 202 except exception.ResourceNotFound as excp: pecan.response.status = excp.code - pecan.response.text = excp.message ? ^^^^ + pecan.response.body = excp.message ? ^^^^ - return - pecan.response.status = 202
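The essence of the fix, reduced to framework-free Python (Response and ResourceNotFound below are stand-ins, not pecan's classes): the 202 assignment must live inside the try block so it only runs on success, and the message has to land on the body attribute since, per the commit message, response.text never reached the client in this pecan version:

class Response(object):
    status = 200
    body = ''


class ResourceNotFound(Exception):
    code = 404


def post(response, trigger_workflow, trigger_id):
    try:
        trigger_workflow(trigger_id)
        response.status = 202
    except ResourceNotFound as excp:
        response.status = excp.code
        response.body = str(excp)


ok = Response()
post(ok, lambda _id: None, 'abc')
print(ok.status)  # 202


def missing(_id):
    raise ResourceNotFound('no such trigger')

bad = Response()
post(bad, missing, 'nope')
print(bad.status, bad.body)  # 404 no such trigger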
ff28ca5797c4468dbe58d78d55b5df6b8878ac36
test_pep438.py
test_pep438.py
import unittest import pep438 if __name__ == '__main__': unittest.main()
import unittest import sys from io import StringIO from clint.textui import core import pep438 class patch_io(object): streams = ('stdout', 'stdin', 'stderr') def __init__(self): for stream in self.streams: setattr(self, stream, StringIO()) setattr(self, 'real_%s' % stream, getattr(sys, stream)) self.real_STDOUT = core.STDOUT self.real_STDERR = core.STDERR def __enter__(self): for stream in self.streams: setattr(sys, stream, getattr(self, stream)) self.STDOUT = self.stdout.write self.STDERR = self.stderr.write return self def __exit__(self, exc_type, exc_value, traceback): for stream in self.streams: getattr(sys, stream).close() setattr(sys, stream, getattr(self, 'real_%s' % stream)) core.STDOUT = self.real_STDOUT core.STDERR = self.real_STDERR class CommandLineTests(unittest.TestCase): def test_version(self): for args in (['pep438', '-v'], ['pep438', '--version']): with patch_io() as new: sys.argv = args self.assertRaises(SystemExit, pep438.main) self.assertEqual(new.stdout.getvalue(), "0.1.0\n") if __name__ == '__main__': unittest.main()
Add a basic command line test
Add a basic command line test
Python
mit
treyhunner/pep438
import unittest + import sys + from io import StringIO + + from clint.textui import core import pep438 + + + class patch_io(object): + + streams = ('stdout', 'stdin', 'stderr') + + def __init__(self): + for stream in self.streams: + setattr(self, stream, StringIO()) + setattr(self, 'real_%s' % stream, getattr(sys, stream)) + self.real_STDOUT = core.STDOUT + self.real_STDERR = core.STDERR + + def __enter__(self): + for stream in self.streams: + setattr(sys, stream, getattr(self, stream)) + self.STDOUT = self.stdout.write + self.STDERR = self.stderr.write + return self + + def __exit__(self, exc_type, exc_value, traceback): + for stream in self.streams: + getattr(sys, stream).close() + setattr(sys, stream, getattr(self, 'real_%s' % stream)) + core.STDOUT = self.real_STDOUT + core.STDERR = self.real_STDERR + + + class CommandLineTests(unittest.TestCase): + + def test_version(self): + for args in (['pep438', '-v'], ['pep438', '--version']): + with patch_io() as new: + sys.argv = args + self.assertRaises(SystemExit, pep438.main) + self.assertEqual(new.stdout.getvalue(), "0.1.0\n") if __name__ == '__main__': unittest.main()
Add a basic command line test
## Code Before: import unittest import pep438 if __name__ == '__main__': unittest.main() ## Instruction: Add a basic command line test ## Code After: import unittest import sys from io import StringIO from clint.textui import core import pep438 class patch_io(object): streams = ('stdout', 'stdin', 'stderr') def __init__(self): for stream in self.streams: setattr(self, stream, StringIO()) setattr(self, 'real_%s' % stream, getattr(sys, stream)) self.real_STDOUT = core.STDOUT self.real_STDERR = core.STDERR def __enter__(self): for stream in self.streams: setattr(sys, stream, getattr(self, stream)) self.STDOUT = self.stdout.write self.STDERR = self.stderr.write return self def __exit__(self, exc_type, exc_value, traceback): for stream in self.streams: getattr(sys, stream).close() setattr(sys, stream, getattr(self, 'real_%s' % stream)) core.STDOUT = self.real_STDOUT core.STDERR = self.real_STDERR class CommandLineTests(unittest.TestCase): def test_version(self): for args in (['pep438', '-v'], ['pep438', '--version']): with patch_io() as new: sys.argv = args self.assertRaises(SystemExit, pep438.main) self.assertEqual(new.stdout.getvalue(), "0.1.0\n") if __name__ == '__main__': unittest.main()
import unittest + import sys + from io import StringIO + + from clint.textui import core import pep438 + + + class patch_io(object): + + streams = ('stdout', 'stdin', 'stderr') + + def __init__(self): + for stream in self.streams: + setattr(self, stream, StringIO()) + setattr(self, 'real_%s' % stream, getattr(sys, stream)) + self.real_STDOUT = core.STDOUT + self.real_STDERR = core.STDERR + + def __enter__(self): + for stream in self.streams: + setattr(sys, stream, getattr(self, stream)) + self.STDOUT = self.stdout.write + self.STDERR = self.stderr.write + return self + + def __exit__(self, exc_type, exc_value, traceback): + for stream in self.streams: + getattr(sys, stream).close() + setattr(sys, stream, getattr(self, 'real_%s' % stream)) + core.STDOUT = self.real_STDOUT + core.STDERR = self.real_STDERR + + + class CommandLineTests(unittest.TestCase): + + def test_version(self): + for args in (['pep438', '-v'], ['pep438', '--version']): + with patch_io() as new: + sys.argv = args + self.assertRaises(SystemExit, pep438.main) + self.assertEqual(new.stdout.getvalue(), "0.1.0\n") if __name__ == '__main__': unittest.main()
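patch_io above is a hand-rolled stream capture; the core trick can be shown in a few lines with contextlib (a Python 3 sketch; the full class also patches clint's STDOUT/STDERR because clint binds the stream writers at import time):

import sys
from contextlib import contextmanager
from io import StringIO


@contextmanager
def captured_stdout():
    real, sys.stdout = sys.stdout, StringIO()
    try:
        yield sys.stdout
    finally:
        sys.stdout = real


with captured_stdout() as out:
    print('0.1.0')
assert out.getvalue() == '0.1.0\n'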
1aa8344177a6e336075134ea802b14e14b8e2f03
utils.py
utils.py
def fix_str(value): try: return unicode(value) except UnicodeDecodeError: return unicode(value.decode('latin1')) def pandas_to_dict(df): return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i]) for i, colname in enumerate(df.columns)} for row in df.values]
from pandas import tslib def fix_render(value): if type(value) is str: try: return unicode(value) except UnicodeDecodeError: return unicode(value.decode('latin1')) elif type(value) is tslib.Timestamp: return value.strftime("%Y-%m-%d %H:%M:%S") return value def pandas_to_dict(df): return [{colname: fix_render(row[i]) for i, colname in enumerate(df.columns)} for row in df.values]
Fix date format on fix render
Fix date format on fix render
Python
mit
mlgruby/mining,chrisdamba/mining,mining/mining,jgabriellima/mining,AndrzejR/mining,chrisdamba/mining,mlgruby/mining,seagoat/mining,AndrzejR/mining,mining/mining,jgabriellima/mining,seagoat/mining,avelino/mining,avelino/mining,mlgruby/mining
+ from pandas import tslib - def fix_str(value): + def fix_render(value): + if type(value) is str: - try: + try: - return unicode(value) + return unicode(value) - except UnicodeDecodeError: + except UnicodeDecodeError: - return unicode(value.decode('latin1')) + return unicode(value.decode('latin1')) + elif type(value) is tslib.Timestamp: + return value.strftime("%Y-%m-%d %H:%M:%S") + return value def pandas_to_dict(df): - return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i]) + return [{colname: fix_render(row[i]) for i, colname in enumerate(df.columns)} for row in df.values]
Fix date format on fix render
## Code Before: def fix_str(value): try: return unicode(value) except UnicodeDecodeError: return unicode(value.decode('latin1')) def pandas_to_dict(df): return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i]) for i, colname in enumerate(df.columns)} for row in df.values] ## Instruction: Fix date format on fix render ## Code After: from pandas import tslib def fix_render(value): if type(value) is str: try: return unicode(value) except UnicodeDecodeError: return unicode(value.decode('latin1')) elif type(value) is tslib.Timestamp: return value.strftime("%Y-%m-%d %H:%M:%S") return value def pandas_to_dict(df): return [{colname: fix_render(row[i]) for i, colname in enumerate(df.columns)} for row in df.values]
+ from pandas import tslib - def fix_str(value): ? ^^ + def fix_render(value): ? ^^^^^ + if type(value) is str: - try: + try: ? ++++ - return unicode(value) + return unicode(value) ? ++++ - except UnicodeDecodeError: + except UnicodeDecodeError: ? ++++ - return unicode(value.decode('latin1')) + return unicode(value.decode('latin1')) ? ++++ + elif type(value) is tslib.Timestamp: + return value.strftime("%Y-%m-%d %H:%M:%S") + return value def pandas_to_dict(df): - return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i]) + return [{colname: fix_render(row[i]) for i, colname in enumerate(df.columns)} for row in df.values]
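A quick demonstration of the new Timestamp branch. One portability note: from pandas import tslib matched the pandas of this era; tslib was later removed from the public surface, so a sketch against pd.Timestamp is the durable spelling:

import pandas as pd

ts = pd.Timestamp('2014-01-02 03:04:05')
print(isinstance(ts, pd.Timestamp))      # True
print(ts.strftime("%Y-%m-%d %H:%M:%S"))  # 2014-01-02 03:04:05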
8e980445723f3f185eb88022adbd75f1a01aaef4
fabfile/ubuntu.py
fabfile/ubuntu.py
from StringIO import StringIO from fabric.api import local, task, run, env, sudo, get from fabric.tasks import execute from fabric.context_managers import lcd, hide @task def apt_update(): with hide('stdout'): sudo('apt-get update') sudo('apt-get -y upgrade')
from StringIO import StringIO from fabric.api import local, task, run, env, sudo, get from fabric.operations import reboot from fabric.tasks import execute from fabric.context_managers import lcd, hide @task def apt_update(): with hide('stdout'): sudo('apt-get update') sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o \ Dpkg::Options::="--force-confold" upgrade') reboot()
Add apt-get upgrade without grub-install
Add apt-get upgrade without grub-install
Python
mit
maruina/kanedias,maruina/kanedias,maruina/kanedias,maruina/kanedias
from StringIO import StringIO from fabric.api import local, task, run, env, sudo, get + from fabric.operations import reboot from fabric.tasks import execute from fabric.context_managers import lcd, hide @task def apt_update(): with hide('stdout'): sudo('apt-get update') - sudo('apt-get -y upgrade') + sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o \ + Dpkg::Options::="--force-confold" upgrade') + reboot() +
Add apt-get upgrade without grub-install
## Code Before: from StringIO import StringIO from fabric.api import local, task, run, env, sudo, get from fabric.tasks import execute from fabric.context_managers import lcd, hide @task def apt_update(): with hide('stdout'): sudo('apt-get update') sudo('apt-get -y upgrade') ## Instruction: Add apt-get upgrade without grub-install ## Code After: from StringIO import StringIO from fabric.api import local, task, run, env, sudo, get from fabric.operations import reboot from fabric.tasks import execute from fabric.context_managers import lcd, hide @task def apt_update(): with hide('stdout'): sudo('apt-get update') sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o \ Dpkg::Options::="--force-confold" upgrade') reboot()
from StringIO import StringIO from fabric.api import local, task, run, env, sudo, get + from fabric.operations import reboot from fabric.tasks import execute from fabric.context_managers import lcd, hide @task def apt_update(): with hide('stdout'): sudo('apt-get update') - sudo('apt-get -y upgrade') + sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o \ + Dpkg::Options::="--force-confold" upgrade') + reboot()
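The added flags are standard apt/dpkg options rather than anything project-specific: DEBIAN_FRONTEND=noninteractive suppresses debconf prompts, --force-confdef answers conffile questions with the default action, and --force-confold keeps the locally modified config file when there is no default; together they let packages such as grub upgrade unattended. A plain-subprocess equivalent, assuming a Debian/Ubuntu host and root privileges:

import subprocess

cmd = ('DEBIAN_FRONTEND=noninteractive apt-get -y '
       '-o Dpkg::Options::="--force-confdef" '
       '-o Dpkg::Options::="--force-confold" upgrade')
subprocess.check_call(cmd, shell=True)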
f2dc9b260e6ca1fcf46b9f23fad5478ab7ff28f8
ce/expr/common.py
ce/expr/common.py
from __future__ import print_function ADD_OP = '+' MULTIPLY_OP = '*' OPERATORS = [ADD_OP, MULTIPLY_OP] _cache_map = dict() def cached(f): def decorated(*args, **kwargs): key = (f, tuple(args), tuple(kwargs.items())) if key in _cache_map: return _cache_map[key] v = f(*args, **kwargs) _cache_map[key] = v return v return decorated
from __future__ import print_function ADD_OP = '+' MULTIPLY_OP = '*' OPERATORS = [ADD_OP, MULTIPLY_OP] def to_immutable(*m): def r(d): if isinstance(d, dict): return tuple((e, to_immutable(v)) for e, v in d.iteritems()) if isinstance(d, (list, tuple)): return tuple(to_immutable(e) for e in d) return d return tuple(r(e) for e in m) _cache_map = dict() def cached(f): def decorated(*args, **kwargs): key = to_immutable(f, args, kwargs.items()) if key in _cache_map: return _cache_map[key] v = f(*args, **kwargs) _cache_map[key] = v return v return decorated
Fix dict argument not hashable
Fix dict argument not hashable
Python
mit
admk/soap
from __future__ import print_function ADD_OP = '+' MULTIPLY_OP = '*' OPERATORS = [ADD_OP, MULTIPLY_OP] + def to_immutable(*m): + def r(d): + if isinstance(d, dict): + return tuple((e, to_immutable(v)) for e, v in d.iteritems()) + if isinstance(d, (list, tuple)): + return tuple(to_immutable(e) for e in d) + return d + return tuple(r(e) for e in m) + + _cache_map = dict() def cached(f): def decorated(*args, **kwargs): - key = (f, tuple(args), tuple(kwargs.items())) + key = to_immutable(f, args, kwargs.items()) if key in _cache_map: return _cache_map[key] v = f(*args, **kwargs) _cache_map[key] = v return v return decorated
Fix dict argument not hashable
## Code Before: from __future__ import print_function ADD_OP = '+' MULTIPLY_OP = '*' OPERATORS = [ADD_OP, MULTIPLY_OP] _cache_map = dict() def cached(f): def decorated(*args, **kwargs): key = (f, tuple(args), tuple(kwargs.items())) if key in _cache_map: return _cache_map[key] v = f(*args, **kwargs) _cache_map[key] = v return v return decorated ## Instruction: Fix dict argument not hashable ## Code After: from __future__ import print_function ADD_OP = '+' MULTIPLY_OP = '*' OPERATORS = [ADD_OP, MULTIPLY_OP] def to_immutable(*m): def r(d): if isinstance(d, dict): return tuple((e, to_immutable(v)) for e, v in d.iteritems()) if isinstance(d, (list, tuple)): return tuple(to_immutable(e) for e in d) return d return tuple(r(e) for e in m) _cache_map = dict() def cached(f): def decorated(*args, **kwargs): key = to_immutable(f, args, kwargs.items()) if key in _cache_map: return _cache_map[key] v = f(*args, **kwargs) _cache_map[key] = v return v return decorated
from __future__ import print_function ADD_OP = '+' MULTIPLY_OP = '*' OPERATORS = [ADD_OP, MULTIPLY_OP] + def to_immutable(*m): + def r(d): + if isinstance(d, dict): + return tuple((e, to_immutable(v)) for e, v in d.iteritems()) + if isinstance(d, (list, tuple)): + return tuple(to_immutable(e) for e in d) + return d + return tuple(r(e) for e in m) + + _cache_map = dict() def cached(f): def decorated(*args, **kwargs): - key = (f, tuple(args), tuple(kwargs.items())) ? ------ - ------ - + key = to_immutable(f, args, kwargs.items()) ? ++++++++++++ if key in _cache_map: return _cache_map[key] v = f(*args, **kwargs) _cache_map[key] = v return v return decorated
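The bug being fixed is easy to reproduce: a dict or list anywhere inside the old tuple key makes the whole key unhashable. Below is a simplified, Python-3-friendly variant of to_immutable (the original recurses through to_immutable and uses iteritems; sorting the items here additionally makes key order deterministic):

def to_immutable(*m):
    def r(d):
        if isinstance(d, dict):
            return tuple(sorted((k, r(v)) for k, v in d.items()))
        if isinstance(d, (list, tuple)):
            return tuple(r(e) for e in d)
        return d
    return tuple(r(e) for e in m)


kwargs = {'x': [1, 2]}
try:
    hash((None, (), tuple(kwargs.items())))  # the old key shape
except TypeError as exc:
    print(exc)  # unhashable type: 'list'

print(to_immutable(None, (), kwargs))
# (None, (), (('x', (1, 2)),)) -- hashable all the way down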
f5143ccb206e5b077f0a80c88555e57064b6acab
fabfile.py
fabfile.py
from fabric.api import * env.hosts = [ '192.168.1.144' ] env.user = 'pi' def prepare_raspberry_pi(): pass def remote_pull(): with cd('virtualenvs/queen/queen'): run('git pull') def deploy(): local('git commit -a') local('git push origin') remote_pull()
from fabric.api import * env.hosts = [ '192.168.1.144' ] env.user = 'pi' def prepare_raspberry_pi(): pass def remote_pull(): with cd('virtualenvs/queen/queen'): run('git pull') def commit(): local('git commit -a') def push(): local('git push origin') def deploy(): commit() push() remote_pull()
Add fab commands to push and pull
Add fab commands to push and pull
Python
mit
kalail/queen,kalail/queen
from fabric.api import * env.hosts = [ '192.168.1.144' ] env.user = 'pi' def prepare_raspberry_pi(): pass def remote_pull(): with cd('virtualenvs/queen/queen'): run('git pull') + def commit(): + local('git commit -a') + + def push(): + local('git push origin') + + def deploy(): - local('git commit -a') - local('git push origin') + commit() + push() remote_pull()
Add fab commands to push and pull
## Code Before: from fabric.api import * env.hosts = [ '192.168.1.144' ] env.user = 'pi' def prepare_raspberry_pi(): pass def remote_pull(): with cd('virtualenvs/queen/queen'): run('git pull') def deploy(): local('git commit -a') local('git push origin') remote_pull() ## Instruction: Add fab commands to push and pull ## Code After: from fabric.api import * env.hosts = [ '192.168.1.144' ] env.user = 'pi' def prepare_raspberry_pi(): pass def remote_pull(): with cd('virtualenvs/queen/queen'): run('git pull') def commit(): local('git commit -a') def push(): local('git push origin') def deploy(): commit() push() remote_pull()
from fabric.api import * env.hosts = [ '192.168.1.144' ] env.user = 'pi' def prepare_raspberry_pi(): pass def remote_pull(): with cd('virtualenvs/queen/queen'): run('git pull') + def commit(): + local('git commit -a') + + def push(): + local('git push origin') + + def deploy(): - local('git commit -a') - local('git push origin') + commit() + push() remote_pull()
d594747d7f5027b6994d98eaa17ed59d6dcb40de
tests/model/test_pwave_classifiers.py
tests/model/test_pwave_classifiers.py
from unittest import TestCase import numpy as np from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier class TestClassifier(TestCase): def test_classifier(self): limb = classifier[0] prec = classifier[1] X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1) X_test, Y_test = X_test[:, 0:8], X_test[:, 8:] d1 = limb.decision_function(X_test) d2 = prec.decision_function(X_test) d = np.column_stack((d1, d2)) np.testing.assert_almost_equal(d, Y_test)
from unittest import TestCase import os import numpy as np from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier path = os.path.dirname(__file__) class TestClassifier(TestCase): def test_classifier(self): limb = classifier[0] prec = classifier[1] X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1) X_test, Y_test = X_test[:, 0:8], X_test[:, 8:] d1 = limb.decision_function(X_test) d2 = prec.decision_function(X_test) d = np.column_stack((d1, d2)) np.testing.assert_almost_equal(d, Y_test)
Fix test path to file dir to be able to load classifier data
Fix test path to file dir to be able to load classifier data
Python
agpl-3.0
citiususc/construe,citiususc/construe,citiususc/construe
from unittest import TestCase + import os import numpy as np from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier + + path = os.path.dirname(__file__) class TestClassifier(TestCase): def test_classifier(self): limb = classifier[0] prec = classifier[1] - X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1) + X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1) X_test, Y_test = X_test[:, 0:8], X_test[:, 8:] d1 = limb.decision_function(X_test) d2 = prec.decision_function(X_test) d = np.column_stack((d1, d2)) np.testing.assert_almost_equal(d, Y_test)
Fix test path to file dir to be able to load classifier data
## Code Before: from unittest import TestCase import numpy as np from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier class TestClassifier(TestCase): def test_classifier(self): limb = classifier[0] prec = classifier[1] X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1) X_test, Y_test = X_test[:, 0:8], X_test[:, 8:] d1 = limb.decision_function(X_test) d2 = prec.decision_function(X_test) d = np.column_stack((d1, d2)) np.testing.assert_almost_equal(d, Y_test) ## Instruction: Fix test path to file dir to be able to load classifier data ## Code After: from unittest import TestCase import os import numpy as np from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier path = os.path.dirname(__file__) class TestClassifier(TestCase): def test_classifier(self): limb = classifier[0] prec = classifier[1] X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1) X_test, Y_test = X_test[:, 0:8], X_test[:, 8:] d1 = limb.decision_function(X_test) d2 = prec.decision_function(X_test) d = np.column_stack((d1, d2)) np.testing.assert_almost_equal(d, Y_test)
from unittest import TestCase + import os import numpy as np from construe.knowledge.abstraction_patterns.segmentation.pwave import _CLASSIFIERS as classifier + + path = os.path.dirname(__file__) class TestClassifier(TestCase): def test_classifier(self): limb = classifier[0] prec = classifier[1] - X_test = np.loadtxt("pw_samples.csv", delimiter=",", skiprows=1) + X_test = np.loadtxt("%s/pw_samples.csv" % path, delimiter=",", skiprows=1) ? +++ +++++++ X_test, Y_test = X_test[:, 0:8], X_test[:, 8:] d1 = limb.decision_function(X_test) d2 = prec.decision_function(X_test) d = np.column_stack((d1, d2)) np.testing.assert_almost_equal(d, Y_test)
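The pattern behind this fix is broadly useful: a bare 'pw_samples.csv' is resolved against the process's current working directory, so the test only passed when run from its own folder. Anchoring on the module's __file__ makes the data file findable from anywhere; os.path.join is the slightly more portable spelling of the same idea:

import os

here = os.path.dirname(os.path.abspath(__file__))
sample = os.path.join(here, 'pw_samples.csv')
print(sample)  # absolute path next to this module, whatever the cwd is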
5a682fccfb6d8e6db3eb100262cf628bfc10e829
categories/serializers.py
categories/serializers.py
from .models import Category, Keyword from rest_framework import serializers class CategorySerializer(serializers.ModelSerializer): class Meta(object): model = Category fields = ('pk', 'name', 'weight', 'comment_required') class KeywordSerializer(serializers.ModelSerializer): class Meta(object): model = Keyword fields = ('pk', 'name')
from .models import Category, Keyword from rest_framework import serializers class CategorySerializer(serializers.ModelSerializer): class Meta(object): model = Category fields = ('pk', 'name', 'comment_required') class KeywordSerializer(serializers.ModelSerializer): class Meta(object): model = Keyword fields = ('pk', 'name')
Remove weight from category serializer
Remove weight from category serializer
Python
apache-2.0
belatrix/BackendAllStars
from .models import Category, Keyword from rest_framework import serializers class CategorySerializer(serializers.ModelSerializer): class Meta(object): model = Category - fields = ('pk', 'name', 'weight', 'comment_required') + fields = ('pk', 'name', 'comment_required') class KeywordSerializer(serializers.ModelSerializer): class Meta(object): model = Keyword fields = ('pk', 'name')
Remove weight from category serializer
## Code Before: from .models import Category, Keyword from rest_framework import serializers class CategorySerializer(serializers.ModelSerializer): class Meta(object): model = Category fields = ('pk', 'name', 'weight', 'comment_required') class KeywordSerializer(serializers.ModelSerializer): class Meta(object): model = Keyword fields = ('pk', 'name') ## Instruction: Remove weight from category serializer ## Code After: from .models import Category, Keyword from rest_framework import serializers class CategorySerializer(serializers.ModelSerializer): class Meta(object): model = Category fields = ('pk', 'name', 'comment_required') class KeywordSerializer(serializers.ModelSerializer): class Meta(object): model = Keyword fields = ('pk', 'name')
from .models import Category, Keyword from rest_framework import serializers class CategorySerializer(serializers.ModelSerializer): class Meta(object): model = Category - fields = ('pk', 'name', 'weight', 'comment_required') ? ---------- + fields = ('pk', 'name', 'comment_required') class KeywordSerializer(serializers.ModelSerializer): class Meta(object): model = Keyword fields = ('pk', 'name')
0b7e957fea7bbd08c79c2b2b4d9b8edfced38496
tests/providers.py
tests/providers.py
import unittest import foauth.providers class ProviderTests(unittest.TestCase): def setUp(self): class Example(foauth.providers.OAuth): provider_url = 'http://example.com' api_domain = 'api.example.com' self.provider = Example def test_auto_name(self): self.assertEqual(self.provider.name, 'Example') def test_auto_alias(self): self.assertEqual(self.provider.alias, 'example') def test_auto_favicon_url(self): url = 'https://www.google.com/s2/favicons?domain=example.com' self.assertEqual(self.provider.favicon_url, url) def test_auto_api_domains(self): self.assertEqual(self.provider.api_domains, ['api.example.com'])
import unittest import foauth.providers import urllib class ProviderTests(unittest.TestCase): def setUp(self): class Example(foauth.providers.OAuth): provider_url = 'http://example.com' api_domain = 'api.example.com' self.provider = Example def test_auto_name(self): self.assertEqual(self.provider.name, 'Example') def test_auto_alias(self): self.assertEqual(self.provider.alias, 'example') def test_auto_favicon_url(self): primary = 'https://getfavicon.appspot.com/http://example.com' backup = 'https://www.google.com/s2/favicons?domain=example.com' url = '%s?defaulticon=%s' % (primary, urllib.quote(backup)) self.assertEqual(self.provider.favicon_url, url) def test_auto_api_domains(self): self.assertEqual(self.provider.api_domains, ['api.example.com'])
Fix favicon tests to match the new scheme
Fix favicon tests to match the new scheme
Python
bsd-3-clause
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
import unittest import foauth.providers + import urllib class ProviderTests(unittest.TestCase): def setUp(self): class Example(foauth.providers.OAuth): provider_url = 'http://example.com' api_domain = 'api.example.com' self.provider = Example def test_auto_name(self): self.assertEqual(self.provider.name, 'Example') def test_auto_alias(self): self.assertEqual(self.provider.alias, 'example') def test_auto_favicon_url(self): + primary = 'https://getfavicon.appspot.com/http://example.com' - url = 'https://www.google.com/s2/favicons?domain=example.com' + backup = 'https://www.google.com/s2/favicons?domain=example.com' + url = '%s?defaulticon=%s' % (primary, urllib.quote(backup)) self.assertEqual(self.provider.favicon_url, url) def test_auto_api_domains(self): self.assertEqual(self.provider.api_domains, ['api.example.com'])
Fix favicon tests to match the new scheme
## Code Before: import unittest import foauth.providers class ProviderTests(unittest.TestCase): def setUp(self): class Example(foauth.providers.OAuth): provider_url = 'http://example.com' api_domain = 'api.example.com' self.provider = Example def test_auto_name(self): self.assertEqual(self.provider.name, 'Example') def test_auto_alias(self): self.assertEqual(self.provider.alias, 'example') def test_auto_favicon_url(self): url = 'https://www.google.com/s2/favicons?domain=example.com' self.assertEqual(self.provider.favicon_url, url) def test_auto_api_domains(self): self.assertEqual(self.provider.api_domains, ['api.example.com']) ## Instruction: Fix favicon tests to match the new scheme ## Code After: import unittest import foauth.providers import urllib class ProviderTests(unittest.TestCase): def setUp(self): class Example(foauth.providers.OAuth): provider_url = 'http://example.com' api_domain = 'api.example.com' self.provider = Example def test_auto_name(self): self.assertEqual(self.provider.name, 'Example') def test_auto_alias(self): self.assertEqual(self.provider.alias, 'example') def test_auto_favicon_url(self): primary = 'https://getfavicon.appspot.com/http://example.com' backup = 'https://www.google.com/s2/favicons?domain=example.com' url = '%s?defaulticon=%s' % (primary, urllib.quote(backup)) self.assertEqual(self.provider.favicon_url, url) def test_auto_api_domains(self): self.assertEqual(self.provider.api_domains, ['api.example.com'])
import unittest import foauth.providers + import urllib class ProviderTests(unittest.TestCase): def setUp(self): class Example(foauth.providers.OAuth): provider_url = 'http://example.com' api_domain = 'api.example.com' self.provider = Example def test_auto_name(self): self.assertEqual(self.provider.name, 'Example') def test_auto_alias(self): self.assertEqual(self.provider.alias, 'example') def test_auto_favicon_url(self): + primary = 'https://getfavicon.appspot.com/http://example.com' - url = 'https://www.google.com/s2/favicons?domain=example.com' ? ^^ + backup = 'https://www.google.com/s2/favicons?domain=example.com' ? ++++ ^ + url = '%s?defaulticon=%s' % (primary, urllib.quote(backup)) self.assertEqual(self.provider.favicon_url, url) def test_auto_api_domains(self): self.assertEqual(self.provider.api_domains, ['api.example.com'])
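The quoting step is the load-bearing part of the new favicon URL: the backup URL is embedded as a query-string value of the primary one, so its ? and = must be percent-encoded or they would be read as part of the outer query. urllib.quote is the Python 2 spelling used by this codebase; the same function lives at urllib.parse.quote on Python 3:

try:
    from urllib import quote        # Python 2
except ImportError:
    from urllib.parse import quote  # Python 3

primary = 'https://getfavicon.appspot.com/http://example.com'
backup = 'https://www.google.com/s2/favicons?domain=example.com'
print('%s?defaulticon=%s' % (primary, quote(backup)))
# https://getfavicon.appspot.com/http://example.com?defaulticon=https%3A//www.google.com/s2/favicons%3Fdomain%3Dexample.com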
b2018bcf9274e0e641e132fe866ef630f99d98a3
profiles/modules/codes/extensions/codes.py
profiles/modules/codes/extensions/codes.py
from django.db import models from django.utils.translation import ugettext_lazy as _ def register(cls, admin_cls): cls.add_to_class('code', models.ForeignKey('profiles.Code', verbose_name=_('Registration code'), null=True, blank=True)) if admin_cls: admin_cls.list_display_filter += ['code', ] if admin_cls.fieldsets: admin_cls.fieldsets.append((_('Registration code'), { 'fields': ['code',], 'classes': ('collapse',), })) admin_cls.filter_horizontal = admin_cls.filter_horizontal + ('code',)
from django.db import models from django.utils.translation import ugettext_lazy as _ def register(cls, admin_cls): cls.add_to_class('code', models.ForeignKey('profiles.Code', verbose_name=_('Registration code'), null=True, blank=True)) if admin_cls: admin_cls.list_display_filter += ['code', ] if admin_cls.fieldsets: admin_cls.fieldsets.append((_('Registration code'), { 'fields': ['code',], 'classes': ('collapse',), })) if admin_cls.filter_horizontal: admin_cls.filter_horizontal = admin_cls.filter_horizontal + ('code',)
Fix error when filter_horizontal is not set on the admin class.
Fix error when filter_horizontal is not set on the admin class.
Python
bsd-2-clause
incuna/django-extensible-profiles
from django.db import models from django.utils.translation import ugettext_lazy as _ def register(cls, admin_cls): cls.add_to_class('code', models.ForeignKey('profiles.Code', verbose_name=_('Registration code'), null=True, blank=True)) if admin_cls: admin_cls.list_display_filter += ['code', ] if admin_cls.fieldsets: admin_cls.fieldsets.append((_('Registration code'), { 'fields': ['code',], 'classes': ('collapse',), })) + + if admin_cls.filter_horizontal: admin_cls.filter_horizontal = admin_cls.filter_horizontal + ('code',)
Fix error when filter_horizontal is not set on the admin class.
## Code Before: from django.db import models
from django.utils.translation import ugettext_lazy as _


def register(cls, admin_cls):
    cls.add_to_class('code', models.ForeignKey('profiles.Code',
        verbose_name=_('Registration code'), null=True, blank=True))

    if admin_cls:
        admin_cls.list_display_filter += ['code', ]

        if admin_cls.fieldsets:
            admin_cls.fieldsets.append((_('Registration code'), {
                'fields': ['code',],
                'classes': ('collapse',),
            }))

        admin_cls.filter_horizontal = admin_cls.filter_horizontal + ('code',)

## Instruction: Fix error when filter_horizontal is not set on the admin class.
## Code After: from django.db import models
from django.utils.translation import ugettext_lazy as _


def register(cls, admin_cls):
    cls.add_to_class('code', models.ForeignKey('profiles.Code',
        verbose_name=_('Registration code'), null=True, blank=True))

    if admin_cls:
        admin_cls.list_display_filter += ['code', ]

        if admin_cls.fieldsets:
            admin_cls.fieldsets.append((_('Registration code'), {
                'fields': ['code',],
                'classes': ('collapse',),
            }))

        if admin_cls.filter_horizontal:
            admin_cls.filter_horizontal = admin_cls.filter_horizontal + ('code',)
from django.db import models from django.utils.translation import ugettext_lazy as _ def register(cls, admin_cls): cls.add_to_class('code', models.ForeignKey('profiles.Code', verbose_name=_('Registration code'), null=True, blank=True)) if admin_cls: admin_cls.list_display_filter += ['code', ] if admin_cls.fieldsets: admin_cls.fieldsets.append((_('Registration code'), { 'fields': ['code',], 'classes': ('collapse',), })) + + if admin_cls.filter_horizontal: admin_cls.filter_horizontal = admin_cls.filter_horizontal + ('code',)
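A guess at the unnamed error, shown concretely (the commit does not spell it out): if a ModelAdmin in this codebase left filter_horizontal as None, the unconditional tuple concatenation raised a TypeError. The truthiness check sidesteps that; note it also skips an empty tuple, so 'code' is only appended when some filter_horizontal is already set:

filter_horizontal = None
try:
    filter_horizontal + ('code',)
except TypeError as exc:
    print(exc)  # unsupported operand type(s) for +: 'NoneType' and 'tuple'

if filter_horizontal:  # the new guard
    filter_horizontal = filter_horizontal + ('code',)
print(filter_horizontal)  # None -- left alone instead of crashing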
5487126bfc3c4fd16243b9c7e00b204f2f8d7374
tests/test_znc.py
tests/test_znc.py
def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening
from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
Tweak the infratest a bit
Tweak the infratest a bit
Python
mit
triplepoint/ansible-znc
+ from testinfra.utils.ansible_runner import AnsibleRunner + + testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') + + + def test_service_enabled(Service): + service = Service('znc') + assert service.is_enabled + + def test_service_running(Service): service = Service('znc') assert service.is_running - def test_socket_listening(Socket): + def test_socket_listening_ipv4(Socket): - socket = Socket('tcp://127.0.0.1:6666') + socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening + + def test_socket_listening_ipv6(Socket): + socket = Socket('tcp://:::6666') + assert not socket.is_listening +
Tweak the infratest a bit
## Code Before: def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening ## Instruction: Tweak the infratest a bit ## Code After: from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
+ from testinfra.utils.ansible_runner import AnsibleRunner + + testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') + + + def test_service_enabled(Service): + service = Service('znc') + assert service.is_enabled + + def test_service_running(Service): service = Service('znc') assert service.is_running - def test_socket_listening(Socket): + def test_socket_listening_ipv4(Socket): ? +++++ - socket = Socket('tcp://127.0.0.1:6666') ? ^^^ ^ + socket = Socket('tcp://0.0.0.0:6666') ? ^ ^ assert socket.is_listening + + + def test_socket_listening_ipv6(Socket): + socket = Socket('tcp://:::6666') + assert not socket.is_listening
dad3eb5c1b0e188671884e97260422a90bdd5c21
gitcommitautosave.py
gitcommitautosave.py
import sublime_plugin class GitCommitAutoSave(sublime_plugin.EventListener): def on_load(self, view): if view.file_name().endswith('COMMIT_EDITMSG'): view.set_scratch(True) # disable save file dialog on exit def on_pre_close(self, view): if view.file_name().endswith('COMMIT_EDITMSG'): view.run_command("save")
import sublime_plugin class GitCommitAutoSave(sublime_plugin.EventListener): def on_load(self, view): if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): view.set_scratch(True) # disable save file dialog on exit def on_pre_close(self, view): if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): view.run_command("save")
Fix 'NoneType' object has no attribute 'endswith'
Fix 'NoneType' object has no attribute 'endswith'
Python
mit
aristidesfl/sublime-git-commit-message-auto-save
import sublime_plugin class GitCommitAutoSave(sublime_plugin.EventListener): def on_load(self, view): - if view.file_name().endswith('COMMIT_EDITMSG'): + if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): view.set_scratch(True) # disable save file dialog on exit def on_pre_close(self, view): - if view.file_name().endswith('COMMIT_EDITMSG'): + if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): view.run_command("save")
Fix 'NoneType' object has no attribute 'endswith'
## Code Before: import sublime_plugin class GitCommitAutoSave(sublime_plugin.EventListener): def on_load(self, view): if view.file_name().endswith('COMMIT_EDITMSG'): view.set_scratch(True) # disable save file dialog on exit def on_pre_close(self, view): if view.file_name().endswith('COMMIT_EDITMSG'): view.run_command("save") ## Instruction: Fix 'NoneType' object has no attribute 'endswith' ## Code After: import sublime_plugin class GitCommitAutoSave(sublime_plugin.EventListener): def on_load(self, view): if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): view.set_scratch(True) # disable save file dialog on exit def on_pre_close(self, view): if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): view.run_command("save")
import sublime_plugin class GitCommitAutoSave(sublime_plugin.EventListener): def on_load(self, view): - if view.file_name().endswith('COMMIT_EDITMSG'): + if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): ? +++++++++++++++++++++ view.set_scratch(True) # disable save file dialog on exit def on_pre_close(self, view): - if view.file_name().endswith('COMMIT_EDITMSG'): + if view.file_name() and view.file_name().endswith('COMMIT_EDITMSG'): ? +++++++++++++++++++++ view.run_command("save")
c21bea6d80287d7c42b3634a7612e4e8cbc419be
plotnine/exceptions.py
plotnine/exceptions.py
from textwrap import dedent import warnings # Statsmodels is slow to fix upstream future warnings # This module is imported before the stats module so # so any FutureWarnings with the imports are suppressed warnings.filterwarnings( 'ignore', category=FutureWarning, module='statsmodels') warnings.filterwarnings( 'ignore', category=FutureWarning, module='pandas') # These are rare warnings.filterwarnings( 'ignore', category=FutureWarning, module='scipy') class PlotnineError(Exception): """ Exception for ggplot errors """ def __init__(self, *args): args = [dedent(arg) for arg in args] self.message = " ".join(args) def __str__(self): return repr(self.message) class PlotnineWarning(UserWarning): """ Warnings for ggplot inconsistencies """ pass
from textwrap import dedent import warnings # Statsmodels is slow to fix upstream future warnings # This module is imported before the stats module so # so any FutureWarnings with the imports are suppressed warnings.filterwarnings( 'ignore', category=FutureWarning, module='statsmodels') warnings.filterwarnings( 'ignore', category=FutureWarning, module='pandas') # These are rare warnings.filterwarnings( 'ignore', category=FutureWarning, module='scipy') # Show the warnings on one line, leaving out any code makes the # message clear def warning_format(message, category, filename, lineno, file=None, line=None): fmt = '{}:{}: {}: {}\n'.format return fmt(filename, lineno, category.__name__, message) warnings.formatwarning = warning_format class PlotnineError(Exception): """ Exception for ggplot errors """ def __init__(self, *args): args = [dedent(arg) for arg in args] self.message = " ".join(args) def __str__(self): return repr(self.message) class PlotnineWarning(UserWarning): """ Warnings for ggplot inconsistencies """ pass
Print warnings on one line & no code
ENH: Print warnings on one line & no code
Python
mit
has2k1/plotnine,has2k1/plotnine
from textwrap import dedent import warnings # Statsmodels is slow to fix upstream future warnings # This module is imported before the stats module so # so any FutureWarnings with the imports are suppressed warnings.filterwarnings( 'ignore', category=FutureWarning, module='statsmodels') warnings.filterwarnings( 'ignore', category=FutureWarning, module='pandas') # These are rare warnings.filterwarnings( 'ignore', category=FutureWarning, module='scipy') + # Show the warnings on one line, leaving out any code makes the + # message clear + def warning_format(message, category, filename, lineno, file=None, line=None): + fmt = '{}:{}: {}: {}\n'.format + return fmt(filename, lineno, category.__name__, message) + + + warnings.formatwarning = warning_format + + class PlotnineError(Exception): """ Exception for ggplot errors """ def __init__(self, *args): args = [dedent(arg) for arg in args] self.message = " ".join(args) def __str__(self): return repr(self.message) class PlotnineWarning(UserWarning): """ Warnings for ggplot inconsistencies """ pass
Print warnings on one line & no code
## Code Before: from textwrap import dedent import warnings # Statsmodels is slow to fix upstream future warnings # This module is imported before the stats module so # so any FutureWarnings with the imports are suppressed warnings.filterwarnings( 'ignore', category=FutureWarning, module='statsmodels') warnings.filterwarnings( 'ignore', category=FutureWarning, module='pandas') # These are rare warnings.filterwarnings( 'ignore', category=FutureWarning, module='scipy') class PlotnineError(Exception): """ Exception for ggplot errors """ def __init__(self, *args): args = [dedent(arg) for arg in args] self.message = " ".join(args) def __str__(self): return repr(self.message) class PlotnineWarning(UserWarning): """ Warnings for ggplot inconsistencies """ pass ## Instruction: Print warnings on one line & no code ## Code After: from textwrap import dedent import warnings # Statsmodels is slow to fix upstream future warnings # This module is imported before the stats module so # so any FutureWarnings with the imports are suppressed warnings.filterwarnings( 'ignore', category=FutureWarning, module='statsmodels') warnings.filterwarnings( 'ignore', category=FutureWarning, module='pandas') # These are rare warnings.filterwarnings( 'ignore', category=FutureWarning, module='scipy') # Show the warnings on one line, leaving out any code makes the # message clear def warning_format(message, category, filename, lineno, file=None, line=None): fmt = '{}:{}: {}: {}\n'.format return fmt(filename, lineno, category.__name__, message) warnings.formatwarning = warning_format class PlotnineError(Exception): """ Exception for ggplot errors """ def __init__(self, *args): args = [dedent(arg) for arg in args] self.message = " ".join(args) def __str__(self): return repr(self.message) class PlotnineWarning(UserWarning): """ Warnings for ggplot inconsistencies """ pass
from textwrap import dedent import warnings # Statsmodels is slow to fix upstream future warnings # This module is imported before the stats module so # so any FutureWarnings with the imports are suppressed warnings.filterwarnings( 'ignore', category=FutureWarning, module='statsmodels') warnings.filterwarnings( 'ignore', category=FutureWarning, module='pandas') # These are rare warnings.filterwarnings( 'ignore', category=FutureWarning, module='scipy') + # Show the warnings on one line, leaving out any code makes the + # message clear + def warning_format(message, category, filename, lineno, file=None, line=None): + fmt = '{}:{}: {}: {}\n'.format + return fmt(filename, lineno, category.__name__, message) + + + warnings.formatwarning = warning_format + + class PlotnineError(Exception): """ Exception for ggplot errors """ def __init__(self, *args): args = [dedent(arg) for arg in args] self.message = " ".join(args) def __str__(self): return repr(self.message) class PlotnineWarning(UserWarning): """ Warnings for ggplot inconsistencies """ pass
3d3aba1ff780061ced014c4387f1d91b9fb168db
skimage/measure/__init__.py
skimage/measure/__init__.py
from ._find_contours import find_contours from ._marching_cubes import marching_cubes, mesh_surface_area from ._regionprops import regionprops, perimeter from ._structural_similarity import structural_similarity from ._polygon import approximate_polygon, subdivide_polygon from ._moments import moments, moments_central, moments_normalized, moments_hu from .profile import profile_line from .fit import LineModel, CircleModel, EllipseModel, ransac from .block import block_reduce __all__ = ['find_contours', 'regionprops', 'perimeter', 'structural_similarity', 'approximate_polygon', 'subdivide_polygon', 'LineModel', 'CircleModel', 'EllipseModel', 'ransac', 'block_reduce', 'moments', 'moments_central', 'moments_normalized', 'moments_hu', 'marching_cubes', 'mesh_surface_area', 'profile_line']
from ._find_contours import find_contours from ._marching_cubes import (marching_cubes, mesh_surface_area, correct_mesh_orientation) from ._regionprops import regionprops, perimeter from ._structural_similarity import structural_similarity from ._polygon import approximate_polygon, subdivide_polygon from ._moments import moments, moments_central, moments_normalized, moments_hu from .profile import profile_line from .fit import LineModel, CircleModel, EllipseModel, ransac from .block import block_reduce __all__ = ['find_contours', 'regionprops', 'perimeter', 'structural_similarity', 'approximate_polygon', 'subdivide_polygon', 'LineModel', 'CircleModel', 'EllipseModel', 'ransac', 'block_reduce', 'moments', 'moments_central', 'moments_normalized', 'moments_hu', 'marching_cubes', 'mesh_surface_area', 'correct_mesh_orientation', 'profile_line']
Add correct_mesh_orientation to skimage.measure imports
Add correct_mesh_orientation to skimage.measure imports
Python
bsd-3-clause
rjeli/scikit-image,pratapvardhan/scikit-image,paalge/scikit-image,WarrenWeckesser/scikits-image,blink1073/scikit-image,juliusbierk/scikit-image,robintw/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,chintak/scikit-image,bsipocz/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,michaelaye/scikit-image,chriscrosscutler/scikit-image,robintw/scikit-image,newville/scikit-image,emon10005/scikit-image,ajaybhat/scikit-image,bsipocz/scikit-image,keflavich/scikit-image,michaelaye/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,GaZ3ll3/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image,SamHames/scikit-image,ClinicalGraphics/scikit-image,michaelpacer/scikit-image,paalge/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,oew1v07/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,chintak/scikit-image,bennlich/scikit-image,GaZ3ll3/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,Midafi/scikit-image,emon10005/scikit-image,chintak/scikit-image,newville/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,chriscrosscutler/scikit-image,warmspringwinds/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,Britefury/scikit-image,Britefury/scikit-image,paalge/scikit-image,dpshelio/scikit-image,juliusbierk/scikit-image,Midafi/scikit-image,michaelpacer/scikit-image,bennlich/scikit-image,SamHames/scikit-image,SamHames/scikit-image,blink1073/scikit-image,rjeli/scikit-image,vighneshbirodkar/scikit-image
from ._find_contours import find_contours - from ._marching_cubes import marching_cubes, mesh_surface_area + from ._marching_cubes import (marching_cubes, mesh_surface_area, + correct_mesh_orientation) from ._regionprops import regionprops, perimeter from ._structural_similarity import structural_similarity from ._polygon import approximate_polygon, subdivide_polygon from ._moments import moments, moments_central, moments_normalized, moments_hu from .profile import profile_line from .fit import LineModel, CircleModel, EllipseModel, ransac from .block import block_reduce __all__ = ['find_contours', 'regionprops', 'perimeter', 'structural_similarity', 'approximate_polygon', 'subdivide_polygon', 'LineModel', 'CircleModel', 'EllipseModel', 'ransac', 'block_reduce', 'moments', 'moments_central', 'moments_normalized', 'moments_hu', 'marching_cubes', 'mesh_surface_area', + 'correct_mesh_orientation', 'profile_line']
Add correct_mesh_orientation to skimage.measure imports
## Code Before: from ._find_contours import find_contours from ._marching_cubes import marching_cubes, mesh_surface_area from ._regionprops import regionprops, perimeter from ._structural_similarity import structural_similarity from ._polygon import approximate_polygon, subdivide_polygon from ._moments import moments, moments_central, moments_normalized, moments_hu from .profile import profile_line from .fit import LineModel, CircleModel, EllipseModel, ransac from .block import block_reduce __all__ = ['find_contours', 'regionprops', 'perimeter', 'structural_similarity', 'approximate_polygon', 'subdivide_polygon', 'LineModel', 'CircleModel', 'EllipseModel', 'ransac', 'block_reduce', 'moments', 'moments_central', 'moments_normalized', 'moments_hu', 'marching_cubes', 'mesh_surface_area', 'profile_line'] ## Instruction: Add correct_mesh_orientation to skimage.measure imports ## Code After: from ._find_contours import find_contours from ._marching_cubes import (marching_cubes, mesh_surface_area, correct_mesh_orientation) from ._regionprops import regionprops, perimeter from ._structural_similarity import structural_similarity from ._polygon import approximate_polygon, subdivide_polygon from ._moments import moments, moments_central, moments_normalized, moments_hu from .profile import profile_line from .fit import LineModel, CircleModel, EllipseModel, ransac from .block import block_reduce __all__ = ['find_contours', 'regionprops', 'perimeter', 'structural_similarity', 'approximate_polygon', 'subdivide_polygon', 'LineModel', 'CircleModel', 'EllipseModel', 'ransac', 'block_reduce', 'moments', 'moments_central', 'moments_normalized', 'moments_hu', 'marching_cubes', 'mesh_surface_area', 'correct_mesh_orientation', 'profile_line']
from ._find_contours import find_contours - from ._marching_cubes import marching_cubes, mesh_surface_area + from ._marching_cubes import (marching_cubes, mesh_surface_area, ? + + + correct_mesh_orientation) from ._regionprops import regionprops, perimeter from ._structural_similarity import structural_similarity from ._polygon import approximate_polygon, subdivide_polygon from ._moments import moments, moments_central, moments_normalized, moments_hu from .profile import profile_line from .fit import LineModel, CircleModel, EllipseModel, ransac from .block import block_reduce __all__ = ['find_contours', 'regionprops', 'perimeter', 'structural_similarity', 'approximate_polygon', 'subdivide_polygon', 'LineModel', 'CircleModel', 'EllipseModel', 'ransac', 'block_reduce', 'moments', 'moments_central', 'moments_normalized', 'moments_hu', 'marching_cubes', 'mesh_surface_area', + 'correct_mesh_orientation', 'profile_line']
8a6015610bba2dcdc0a2cb031b2f58606328841f
src/fastpb/generator.py
src/fastpb/generator.py
import plugin_pb2 from jinja2 import Template from pkg_resources import resource_string import os.path import sys import tempfile def main(): log = sys.stderr request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.read()) path = tempfile.mkdtemp() generateFiles = set(request.file_to_generate) files = [] for file in request.proto_file: if file.name not in generateFiles: continue name = file.name.split('.')[0] files.append(name) context = { 'moduleName': name, 'messages': file.message_type } cFilePath = os.path.join(path, name + '.c') with open(cFilePath, 'w') as f: t = Template(resource_string(__name__, 'template/module.jinja.c')) f.write(t.render(context)) setupPyPath = os.path.join(path, 'setup.py') with open(setupPyPath, 'w') as f: t = Template(resource_string(__name__, 'template/setup.jinja.py')) f.write(t.render({'files': files})) print >> log, path if __name__ == '__main__': main()
import plugin_pb2 from jinja2 import Template from pkg_resources import resource_string import os.path import sys import tempfile def main(): request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.read()) response = plugin_pb2.CodeGeneratorResponse() generateFiles = set(request.file_to_generate) files = [] for file in request.proto_file: if file.name not in generateFiles: continue name = file.name.split('.')[0] files.append(name) context = { 'moduleName': name, 'messages': file.message_type } # Write the C file. t = Template(resource_string(__name__, 'template/module.jinja.c')) cFile = response.file.add() cFile.name = name + '.c' cFile.content = t.render(context) # Write setup.py. t = Template(resource_string(__name__, 'template/setup.jinja.py')) setupFile = response.file.add() setupFile.name = 'setup.py' setupFile.content = t.render({'files': files}) sys.stdout.write(response.SerializeToString()) if __name__ == '__main__': main()
Use protoc for file output
Use protoc for file output
Python
apache-2.0
Cue/fast-python-pb
import plugin_pb2 from jinja2 import Template from pkg_resources import resource_string import os.path import sys import tempfile def main(): - log = sys.stderr - request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.read()) - path = tempfile.mkdtemp() + response = plugin_pb2.CodeGeneratorResponse() generateFiles = set(request.file_to_generate) files = [] for file in request.proto_file: if file.name not in generateFiles: continue name = file.name.split('.')[0] files.append(name) context = { 'moduleName': name, 'messages': file.message_type } + # Write the C file. - cFilePath = os.path.join(path, name + '.c') - with open(cFilePath, 'w') as f: - t = Template(resource_string(__name__, 'template/module.jinja.c')) + t = Template(resource_string(__name__, 'template/module.jinja.c')) - f.write(t.render(context)) + cFile = response.file.add() + cFile.name = name + '.c' + cFile.content = t.render(context) + # Write setup.py. - setupPyPath = os.path.join(path, 'setup.py') - with open(setupPyPath, 'w') as f: - t = Template(resource_string(__name__, 'template/setup.jinja.py')) + t = Template(resource_string(__name__, 'template/setup.jinja.py')) - f.write(t.render({'files': files})) + setupFile = response.file.add() + setupFile.name = 'setup.py' + setupFile.content = t.render({'files': files}) - print >> log, path + sys.stdout.write(response.SerializeToString()) if __name__ == '__main__': main()
Use protoc for file output
## Code Before: import plugin_pb2 from jinja2 import Template from pkg_resources import resource_string import os.path import sys import tempfile def main(): log = sys.stderr request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.read()) path = tempfile.mkdtemp() generateFiles = set(request.file_to_generate) files = [] for file in request.proto_file: if file.name not in generateFiles: continue name = file.name.split('.')[0] files.append(name) context = { 'moduleName': name, 'messages': file.message_type } cFilePath = os.path.join(path, name + '.c') with open(cFilePath, 'w') as f: t = Template(resource_string(__name__, 'template/module.jinja.c')) f.write(t.render(context)) setupPyPath = os.path.join(path, 'setup.py') with open(setupPyPath, 'w') as f: t = Template(resource_string(__name__, 'template/setup.jinja.py')) f.write(t.render({'files': files})) print >> log, path if __name__ == '__main__': main() ## Instruction: Use protoc for file output ## Code After: import plugin_pb2 from jinja2 import Template from pkg_resources import resource_string import os.path import sys import tempfile def main(): request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.read()) response = plugin_pb2.CodeGeneratorResponse() generateFiles = set(request.file_to_generate) files = [] for file in request.proto_file: if file.name not in generateFiles: continue name = file.name.split('.')[0] files.append(name) context = { 'moduleName': name, 'messages': file.message_type } # Write the C file. t = Template(resource_string(__name__, 'template/module.jinja.c')) cFile = response.file.add() cFile.name = name + '.c' cFile.content = t.render(context) # Write setup.py. t = Template(resource_string(__name__, 'template/setup.jinja.py')) setupFile = response.file.add() setupFile.name = 'setup.py' setupFile.content = t.render({'files': files}) sys.stdout.write(response.SerializeToString()) if __name__ == '__main__': main()
import plugin_pb2 from jinja2 import Template from pkg_resources import resource_string import os.path import sys import tempfile def main(): - log = sys.stderr - request = plugin_pb2.CodeGeneratorRequest() request.ParseFromString(sys.stdin.read()) - path = tempfile.mkdtemp() + response = plugin_pb2.CodeGeneratorResponse() generateFiles = set(request.file_to_generate) files = [] for file in request.proto_file: if file.name not in generateFiles: continue name = file.name.split('.')[0] files.append(name) context = { 'moduleName': name, 'messages': file.message_type } + # Write the C file. - cFilePath = os.path.join(path, name + '.c') - with open(cFilePath, 'w') as f: - t = Template(resource_string(__name__, 'template/module.jinja.c')) ? -- + t = Template(resource_string(__name__, 'template/module.jinja.c')) - f.write(t.render(context)) + cFile = response.file.add() + cFile.name = name + '.c' + cFile.content = t.render(context) + # Write setup.py. - setupPyPath = os.path.join(path, 'setup.py') - with open(setupPyPath, 'w') as f: - t = Template(resource_string(__name__, 'template/setup.jinja.py')) ? -- + t = Template(resource_string(__name__, 'template/setup.jinja.py')) - f.write(t.render({'files': files})) + setupFile = response.file.add() + setupFile.name = 'setup.py' + setupFile.content = t.render({'files': files}) - print >> log, path + sys.stdout.write(response.SerializeToString()) if __name__ == '__main__': main()
21bf18a03c485304aa00dc2af86aa91930e4b1ac
tests/test_grammar.py
tests/test_grammar.py
import pytest from parglare import Grammar from parglare.exceptions import GrammarError def test_terminal_nonterminal_conflict(): # Production A is a terminal ("a") and non-terminal at the same time. g = """ A = "a" | B; B = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e) def test_multiple_terminal_definition(): g = """ S = A A; A = "a"; A = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e)
import pytest from parglare import Grammar def test_terminal_nonterminal(): # Production A is a terminal ("a") and non-terminal at the same time. # Thus, it must be recognized as non-terminal. g = """ S = A B; A = "a" | B; B = "b"; """ Grammar.from_string(g) # Here A shoud be non-terminal while B will be terminal. g = """ S = A B; A = B; B = "b"; """ Grammar.from_string(g) def test_multiple_terminal_definition(): # A is defined multiple times as terminal thus it must be recognized # as non-terminal with alternative expansions. g = """ S = A A; A = "a"; A = "b"; """ Grammar.from_string(g)
Fix in tests for terminal definitions.
Fix in tests for terminal definitions.
Python
mit
igordejanovic/parglare,igordejanovic/parglare
import pytest from parglare import Grammar - from parglare.exceptions import GrammarError - def test_terminal_nonterminal_conflict(): + def test_terminal_nonterminal(): # Production A is a terminal ("a") and non-terminal at the same time. + # Thus, it must be recognized as non-terminal. g = """ + S = A B; A = "a" | B; B = "b"; """ - try: - Grammar.from_string(g) + Grammar.from_string(g) - assert False - except GrammarError as e: - assert 'Multiple definition' in str(e) + + # Here A shoud be non-terminal while B will be terminal. + g = """ + S = A B; + A = B; + B = "b"; + """ + + Grammar.from_string(g) def test_multiple_terminal_definition(): + # A is defined multiple times as terminal thus it must be recognized + # as non-terminal with alternative expansions. g = """ S = A A; A = "a"; A = "b"; """ - try: - Grammar.from_string(g) - assert False - except GrammarError as e: - assert 'Multiple definition' in str(e) + Grammar.from_string(g) +
Fix in tests for terminal definitions.
## Code Before: import pytest from parglare import Grammar from parglare.exceptions import GrammarError def test_terminal_nonterminal_conflict(): # Production A is a terminal ("a") and non-terminal at the same time. g = """ A = "a" | B; B = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e) def test_multiple_terminal_definition(): g = """ S = A A; A = "a"; A = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e) ## Instruction: Fix in tests for terminal definitions. ## Code After: import pytest from parglare import Grammar def test_terminal_nonterminal(): # Production A is a terminal ("a") and non-terminal at the same time. # Thus, it must be recognized as non-terminal. g = """ S = A B; A = "a" | B; B = "b"; """ Grammar.from_string(g) # Here A shoud be non-terminal while B will be terminal. g = """ S = A B; A = B; B = "b"; """ Grammar.from_string(g) def test_multiple_terminal_definition(): # A is defined multiple times as terminal thus it must be recognized # as non-terminal with alternative expansions. g = """ S = A A; A = "a"; A = "b"; """ Grammar.from_string(g)
import pytest from parglare import Grammar - from parglare.exceptions import GrammarError - def test_terminal_nonterminal_conflict(): ? --------- + def test_terminal_nonterminal(): # Production A is a terminal ("a") and non-terminal at the same time. + # Thus, it must be recognized as non-terminal. g = """ + S = A B; A = "a" | B; B = "b"; """ - try: - Grammar.from_string(g) ? ---- + Grammar.from_string(g) - assert False - except GrammarError as e: - assert 'Multiple definition' in str(e) + + # Here A shoud be non-terminal while B will be terminal. + g = """ + S = A B; + A = B; + B = "b"; + """ + + Grammar.from_string(g) def test_multiple_terminal_definition(): + # A is defined multiple times as terminal thus it must be recognized + # as non-terminal with alternative expansions. g = """ S = A A; A = "a"; A = "b"; """ - try: + - Grammar.from_string(g) ? ---- + Grammar.from_string(g) - assert False - except GrammarError as e: - assert 'Multiple definition' in str(e)
c3284516e8dc2c7fccfbf7e4bff46a66b4ad2f15
cref/evaluation/__init__.py
cref/evaluation/__init__.py
import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 100 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } output_files = predict_fasta(fasta_file, output_dir, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 5 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } prediction_output = output_dir + str(run) os.mkdir(prediction_output) output_files = predict_fasta(fasta_file, prediction_output, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
Save output for every run
Save output for every run
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] - runs = 100 + runs = 5 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } + prediction_output = output_dir + str(run) + os.mkdir(prediction_output) - output_files = predict_fasta(fasta_file, output_dir, params) + output_files = predict_fasta(fasta_file, prediction_output, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
Save output for every run
## Code Before: import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 100 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } output_files = predict_fasta(fasta_file, output_dir, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds)) ## Instruction: Save output for every run ## Code After: import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 5 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } prediction_output = output_dir + str(run) os.mkdir(prediction_output) output_files = predict_fasta(fasta_file, prediction_output, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] - runs = 100 ? ^^^ + runs = 5 ? ^ fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } + prediction_output = output_dir + str(run) + os.mkdir(prediction_output) - output_files = predict_fasta(fasta_file, output_dir, params) ? ---- + output_files = predict_fasta(fasta_file, prediction_output, params) ? +++++++++++ predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
c465b1f0c995ac2cb7c6c8b4ad5f721f800e2864
argparams.py
argparams.py
from __future__ import print_function, division class ARGparams(object): """Class for ARG model parameters. Attributes ---------- scale : float rho : float delta : float Methods ------- convert_to_theta Convert parameters to the vector """ def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None): """Initialize the class instance. """ if theta: assert len(theta) == 3, "Wrong number of parameters in theta!" [scale, rho, delta] = theta self.scale = scale self.rho = rho self.delta = delta assert scale > 0, "Scale must be greater than zero!" self.beta = self.rho / self.scale self.theta = [scale, rho, delta] def __repr__(self): """This is what is shown when you interactively explore the instance. """ params = (self.scale, self.rho, self.delta) string = "scale = %.2f, rho = %.2f, delta = %.2f" % params return string def __str__(self): """This is what is shown when you print() the instance. """ return self.__repr__()
from __future__ import print_function, division class ARGparams(object): """Class for ARG model parameters. Attributes ---------- scale : float rho : float delta : float beta : float theta : list Raises ------ AssertionError """ def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None): """Initialize the class instance. """ if theta: assert len(theta) == 3, "Wrong number of parameters in theta!" [scale, rho, delta] = theta self.scale = scale self.rho = rho self.delta = delta assert scale > 0, "Scale must be greater than zero!" self.beta = self.rho / self.scale self.theta = [scale, rho, delta] def __repr__(self): """This is what is shown when you interactively explore the instance. """ params = (self.scale, self.rho, self.delta) string = "scale = %.2f, rho = %.2f, delta = %.2f" % params return string def __str__(self): """This is what is shown when you print() the instance. """ return self.__repr__()
Fix incorrect attributes in ARGparams class
Fix incorrect attributes in ARGparams class
Python
mit
khrapovs/argamma
from __future__ import print_function, division class ARGparams(object): """Class for ARG model parameters. Attributes ---------- scale : float rho : float delta : float + beta : float + theta : list - Methods + Raises - ------- + ------ + AssertionError - convert_to_theta - Convert parameters to the vector """ def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None): """Initialize the class instance. """ if theta: assert len(theta) == 3, "Wrong number of parameters in theta!" [scale, rho, delta] = theta self.scale = scale self.rho = rho self.delta = delta assert scale > 0, "Scale must be greater than zero!" self.beta = self.rho / self.scale self.theta = [scale, rho, delta] def __repr__(self): """This is what is shown when you interactively explore the instance. """ params = (self.scale, self.rho, self.delta) string = "scale = %.2f, rho = %.2f, delta = %.2f" % params return string def __str__(self): """This is what is shown when you print() the instance. """ return self.__repr__()
Fix incorrect attributes in ARGparams class
## Code Before: from __future__ import print_function, division class ARGparams(object): """Class for ARG model parameters. Attributes ---------- scale : float rho : float delta : float Methods ------- convert_to_theta Convert parameters to the vector """ def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None): """Initialize the class instance. """ if theta: assert len(theta) == 3, "Wrong number of parameters in theta!" [scale, rho, delta] = theta self.scale = scale self.rho = rho self.delta = delta assert scale > 0, "Scale must be greater than zero!" self.beta = self.rho / self.scale self.theta = [scale, rho, delta] def __repr__(self): """This is what is shown when you interactively explore the instance. """ params = (self.scale, self.rho, self.delta) string = "scale = %.2f, rho = %.2f, delta = %.2f" % params return string def __str__(self): """This is what is shown when you print() the instance. """ return self.__repr__() ## Instruction: Fix incorrect attributes in ARGparams class ## Code After: from __future__ import print_function, division class ARGparams(object): """Class for ARG model parameters. Attributes ---------- scale : float rho : float delta : float beta : float theta : list Raises ------ AssertionError """ def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None): """Initialize the class instance. """ if theta: assert len(theta) == 3, "Wrong number of parameters in theta!" [scale, rho, delta] = theta self.scale = scale self.rho = rho self.delta = delta assert scale > 0, "Scale must be greater than zero!" self.beta = self.rho / self.scale self.theta = [scale, rho, delta] def __repr__(self): """This is what is shown when you interactively explore the instance. """ params = (self.scale, self.rho, self.delta) string = "scale = %.2f, rho = %.2f, delta = %.2f" % params return string def __str__(self): """This is what is shown when you print() the instance. """ return self.__repr__()
from __future__ import print_function, division class ARGparams(object): """Class for ARG model parameters. Attributes ---------- scale : float rho : float delta : float + beta : float + theta : list - Methods + Raises - ------- ? - + ------ + AssertionError - convert_to_theta - Convert parameters to the vector """ def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None): """Initialize the class instance. """ if theta: assert len(theta) == 3, "Wrong number of parameters in theta!" [scale, rho, delta] = theta self.scale = scale self.rho = rho self.delta = delta assert scale > 0, "Scale must be greater than zero!" self.beta = self.rho / self.scale self.theta = [scale, rho, delta] def __repr__(self): """This is what is shown when you interactively explore the instance. """ params = (self.scale, self.rho, self.delta) string = "scale = %.2f, rho = %.2f, delta = %.2f" % params return string def __str__(self): """This is what is shown when you print() the instance. """ return self.__repr__()
30ffff16e5dd4eec6e5128a277a677834470be73
scikits/learn/tests/test_cross_val.py
scikits/learn/tests/test_cross_val.py
import numpy as np import nose from .. import cross_val def test_kfold(): # Check that errors are raise if there is not enough samples nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3) y = [0, 0, 1, 1, 2] nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3)
import numpy as np import nose from ..base import BaseEstimator from .. import cross_val class MockClassifier(BaseEstimator): """Dummy classifier to test the cross-validation """ def __init__(self, a=0): self.a = a def fit(self, X, Y, **params): self._set_params(**params) return self def predict(self, T): return T.shape[0] def score(self, X=None, Y=None): return 1./(1+np.abs(self.a)) X = np.ones((10, 2)) y = np.arange(10)/2 ################################################################################ # Tests def test_kfold(): # Check that errors are raise if there is not enough samples nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3) y = [0, 0, 1, 1, 2] nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3) def test_cross_val_score(): clf = MockClassifier() for a in range(-10, 10): clf.a = a # Smoke test score = cross_val.cross_val_score(clf, X, y) np.testing.assert_array_equal(score, clf.score(X, y))
Add a smoke test for cross_val_score
TEST: Add a smoke test for cross_val_score
Python
bsd-3-clause
mjgrav2001/scikit-learn,marcocaccin/scikit-learn,jmschrei/scikit-learn,ycaihua/scikit-learn,sarahgrogan/scikit-learn,xubenben/scikit-learn,hsiaoyi0504/scikit-learn,ycaihua/scikit-learn,madjelan/scikit-learn,hsuantien/scikit-learn,kmike/scikit-learn,Vimos/scikit-learn,pythonvietnam/scikit-learn,AlexanderFabisch/scikit-learn,madjelan/scikit-learn,hainm/scikit-learn,mehdidc/scikit-learn,bikong2/scikit-learn,cainiaocome/scikit-learn,hsiaoyi0504/scikit-learn,Titan-C/scikit-learn,hsuantien/scikit-learn,kmike/scikit-learn,Vimos/scikit-learn,pythonvietnam/scikit-learn,AlexanderFabisch/scikit-learn,ycaihua/scikit-learn,madjelan/scikit-learn,hsuantien/scikit-learn,rvraghav93/scikit-learn,pypot/scikit-learn,hainm/scikit-learn,mehdidc/scikit-learn,bikong2/scikit-learn,cainiaocome/scikit-learn,RPGOne/scikit-learn,tmhm/scikit-learn,jseabold/scikit-learn,alvarofierroclavero/scikit-learn,thientu/scikit-learn,dingocuster/scikit-learn,yonglehou/scikit-learn,dingocuster/scikit-learn,massmutual/scikit-learn,rvraghav93/scikit-learn,pypot/scikit-learn,hainm/scikit-learn,giorgiop/scikit-learn,rsivapr/scikit-learn,vortex-ape/scikit-learn,bikong2/scikit-learn,sergeyf/scikit-learn,vybstat/scikit-learn,ltiao/scikit-learn,schets/scikit-learn,lenovor/scikit-learn,Lawrence-Liu/scikit-learn,mjudsp/Tsallis,moutai/scikit-learn,ElDeveloper/scikit-learn,xwolf12/scikit-learn,ChanderG/scikit-learn,Windy-Ground/scikit-learn,devanshdalal/scikit-learn,zorojean/scikit-learn,deepesch/scikit-learn,chrisburr/scikit-learn,chrisburr/scikit-learn,loli/sklearn-ensembletrees,luo66/scikit-learn,plissonf/scikit-learn,alexeyum/scikit-learn,trankmichael/scikit-learn,nomadcube/scikit-learn,AlexRobson/scikit-learn,belltailjp/scikit-learn,NunoEdgarGub1/scikit-learn,zorojean/scikit-learn,potash/scikit-learn,hdmetor/scikit-learn,alexsavio/scikit-learn,phdowling/scikit-learn,cwu2011/scikit-learn,costypetrisor/scikit-learn,carrillo/scikit-learn,hdmetor/scikit-learn,zaxtax/scikit-learn,chrsrds/scikit-learn,vinayak-mehta/scikit-learn,lenovor/scikit-learn,nomadcube/scikit-learn,moutai/scikit-learn,LiaoPan/scikit-learn,cainiaocome/scikit-learn,cwu2011/scikit-learn,HolgerPeters/scikit-learn,liyu1990/sklearn,henrykironde/scikit-learn,xyguo/scikit-learn,NelisVerhoef/scikit-learn,Nyker510/scikit-learn,costypetrisor/scikit-learn,hdmetor/scikit-learn,rohanp/scikit-learn,AlexRobson/scikit-learn,tomlof/scikit-learn,YinongLong/scikit-learn,eickenberg/scikit-learn,ChanChiChoi/scikit-learn,jseabold/scikit-learn,fabioticconi/scikit-learn,mwv/scikit-learn,krez13/scikit-learn,lbishal/scikit-learn,MatthieuBizien/scikit-learn,lucidfrontier45/scikit-learn,simon-pepin/scikit-learn,phdowling/scikit-learn,moutai/scikit-learn,YinongLong/scikit-learn,bhargav/scikit-learn,sgenoud/scikit-learn,xzh86/scikit-learn,abhishekkrthakur/scikit-learn,herilalaina/scikit-learn,robin-lai/scikit-learn,yonglehou/scikit-learn,cwu2011/scikit-learn,jkarnows/scikit-learn,sergeyf/scikit-learn,xubenben/scikit-learn,nelson-liu/scikit-learn,trungnt13/scikit-learn,jayflo/scikit-learn,ningchi/scikit-learn,Windy-Ground/scikit-learn,samzhang111/scikit-learn,RachitKansal/scikit-learn,mxjl620/scikit-learn,ominux/scikit-learn,ZenDevelopmentSystems/scikit-learn,btabibian/scikit-learn,lbishal/scikit-learn,ChanChiChoi/scikit-learn,xiaoxiamii/scikit-learn,3manuek/scikit-learn,icdishb/scikit-learn,yunfeilu/scikit-learn,rexshihaoren/scikit-learn,rsivapr/scikit-learn,DonBeo/scikit-learn,MohammedWasim/scikit-learn,khkaminska/scikit-learn,bikong2/scikit-learn,smartscheduling/scikit-learn-categorical-tree,liyu1990/sklearn,zorroblue/scikit-learn,alvarofierroclavero/scikit-learn,aflaxman/scikit-learn,xavierwu/scikit-learn,jkarnows/scikit-learn,mwv/scikit-learn,mattgiguere/scikit-learn,liberatorqjw/scikit-learn,beepee14/scikit-learn,jm-begon/scikit-learn,mhdella/scikit-learn,Garrett-R/scikit-learn,zaxtax/scikit-learn,shahankhatch/scikit-learn,altairpearl/scikit-learn,idlead/scikit-learn,jpautom/scikit-learn,aabadie/scikit-learn,henrykironde/scikit-learn,kevin-intel/scikit-learn,Jimmy-Morzaria/scikit-learn,saiwing-yeung/scikit-learn,betatim/scikit-learn,sanketloke/scikit-learn,arabenjamin/scikit-learn,yunfeilu/scikit-learn,florian-f/sklearn,kevin-intel/scikit-learn,ominux/scikit-learn,andaag/scikit-learn,zuku1985/scikit-learn,BiaDarkia/scikit-learn,huzq/scikit-learn,huobaowangxi/scikit-learn,RomainBrault/scikit-learn,AlexanderFabisch/scikit-learn,amueller/scikit-learn,manhhomienbienthuy/scikit-learn,walterreade/scikit-learn,MartinDelzant/scikit-learn,nesterione/scikit-learn,murali-munna/scikit-learn,HolgerPeters/scikit-learn,iismd17/scikit-learn,ilyes14/scikit-learn,ahoyosid/scikit-learn,shangwuhencc/scikit-learn,simon-pepin/scikit-learn,q1ang/scikit-learn,roxyboy/scikit-learn,ClimbsRocks/scikit-learn,adamgreenhall/scikit-learn,0asa/scikit-learn,yonglehou/scikit-learn,liangz0707/scikit-learn,lin-credible/scikit-learn,mjgrav2001/scikit-learn,ZenDevelopmentSystems/scikit-learn,raghavrv/scikit-learn,mugizico/scikit-learn,h2educ/scikit-learn,btabibian/scikit-learn,jorge2703/scikit-learn,russel1237/scikit-learn,MartinDelzant/scikit-learn,nmayorov/scikit-learn,mayblue9/scikit-learn,shikhardb/scikit-learn,ashhher3/scikit-learn,stylianos-kampakis/scikit-learn,jorik041/scikit-learn,vibhorag/scikit-learn,kagayakidan/scikit-learn,ngoix/OCRF,sonnyhu/scikit-learn,manhhomienbienthuy/scikit-learn,joshloyal/scikit-learn,harshaneelhg/scikit-learn,kmike/scikit-learn,Sentient07/scikit-learn,xuewei4d/scikit-learn,waterponey/scikit-learn,olologin/scikit-learn,tomlof/scikit-learn,dhruv13J/scikit-learn,treycausey/scikit-learn,ankurankan/scikit-learn,466152112/scikit-learn,evgchz/scikit-learn,khkaminska/scikit-learn,jmetzen/scikit-learn,ilo10/scikit-learn,mayblue9/scikit-learn,jorge2703/scikit-learn,idlead/scikit-learn,mojoboss/scikit-learn,bhargav/scikit-learn,manashmndl/scikit-learn,pythonvietnam/scikit-learn,aetilley/scikit-learn,CVML/scikit-learn,jakirkham/scikit-learn,shenzebang/scikit-learn,mlyundin/scikit-learn,ilyes14/scikit-learn,3manuek/scikit-learn,ishanic/scikit-learn,terkkila/scikit-learn,ky822/scikit-learn,huzq/scikit-learn,jjx02230808/project0223,btabibian/scikit-learn,yunfeilu/scikit-learn,OshynSong/scikit-learn,huobaowangxi/scikit-learn,clemkoa/scikit-learn,jaidevd/scikit-learn,glennq/scikit-learn,UNR-AERIAL/scikit-learn,joshloyal/scikit-learn,rishikksh20/scikit-learn,tosolveit/scikit-learn,schets/scikit-learn,mattilyra/scikit-learn,vybstat/scikit-learn,nvoron23/scikit-learn,jorge2703/scikit-learn,beepee14/scikit-learn,Akshay0724/scikit-learn,evgchz/scikit-learn,ngoix/OCRF,yanlend/scikit-learn,xuewei4d/scikit-learn,aflaxman/scikit-learn,PatrickChrist/scikit-learn,Akshay0724/scikit-learn,loli/semisupervisedforests,AlexandreAbraham/scikit-learn,kashif/scikit-learn,massmutual/scikit-learn,DSLituiev/scikit-learn,mjgrav2001/scikit-learn,hlin117/scikit-learn,IndraVikas/scikit-learn,MartinSavc/scikit-learn,OshynSong/scikit-learn,bikong2/scikit-learn,xubenben/scikit-learn,anntzer/scikit-learn,ningchi/scikit-learn,3manuek/scikit-learn,fengzhyuan/scikit-learn,mattgiguere/scikit-learn,cybernet14/scikit-learn,dsullivan7/scikit-learn,shahankhatch/scikit-learn,aminert/scikit-learn,amueller/scikit-learn,sinhrks/scikit-learn,robin-lai/scikit-learn,UNR-AERIAL/scikit-learn,mlyundin/scikit-learn,bthirion/scikit-learn,pompiduskus/scikit-learn,spallavolu/scikit-learn,sonnyhu/scikit-learn,mattilyra/scikit-learn,murali-munna/scikit-learn,B3AU/waveTree,LohithBlaze/scikit-learn,huobaowangxi/scikit-learn,Sentient07/scikit-learn,yanlend/scikit-learn,JPFrancoia/scikit-learn,harshaneelhg/scikit-learn,rohanp/scikit-learn,petosegan/scikit-learn,Titan-C/scikit-learn,h2educ/scikit-learn,andrewnc/scikit-learn,equialgo/scikit-learn,nesterione/scikit-learn,sarahgrogan/scikit-learn,Garrett-R/scikit-learn,gotomypc/scikit-learn,dhruv13J/scikit-learn,Garrett-R/scikit-learn,rrohan/scikit-learn,jakirkham/scikit-learn,hainm/scikit-learn,BiaDarkia/scikit-learn,qifeigit/scikit-learn,andrewnc/scikit-learn,meduz/scikit-learn,wazeerzulfikar/scikit-learn,olologin/scikit-learn,jereze/scikit-learn,meduz/scikit-learn,vybstat/scikit-learn,shusenl/scikit-learn,ycaihua/scikit-learn,tomlof/scikit-learn,nikitasingh981/scikit-learn,imaculate/scikit-learn,larsmans/scikit-learn,appapantula/scikit-learn,mhue/scikit-learn,NelisVerhoef/scikit-learn,jpautom/scikit-learn,ElDeveloper/scikit-learn,massmutual/scikit-learn,vivekmishra1991/scikit-learn,ChanChiChoi/scikit-learn,marcocaccin/scikit-learn,akionakamura/scikit-learn,hsuantien/scikit-learn,glennq/scikit-learn,mattilyra/scikit-learn,themrmax/scikit-learn,henridwyer/scikit-learn,bthirion/scikit-learn,lucidfrontier45/scikit-learn,depet/scikit-learn,jorik041/scikit-learn,mfjb/scikit-learn,procoder317/scikit-learn,kaichogami/scikit-learn,untom/scikit-learn,Srisai85/scikit-learn,liyu1990/sklearn,pnedunuri/scikit-learn,jkarnows/scikit-learn,liberatorqjw/scikit-learn,nrhine1/scikit-learn,joernhees/scikit-learn,MohammedWasim/scikit-learn,waterponey/scikit-learn,ominux/scikit-learn,idlead/scikit-learn,andaag/scikit-learn,loli/semisupervisedforests,raghavrv/scikit-learn,frank-tancf/scikit-learn,Jimmy-Morzaria/scikit-learn,abhishekkrthakur/scikit-learn,jmetzen/scikit-learn,maheshakya/scikit-learn,Nyker510/scikit-learn,Myasuka/scikit-learn,ephes/scikit-learn,ssaeger/scikit-learn,pv/scikit-learn,anurag313/scikit-learn,cl4rke/scikit-learn,sinhrks/scikit-learn,rrohan/scikit-learn,plissonf/scikit-learn,akionakamura/scikit-learn,vshtanko/scikit-learn,BiaDarkia/scikit-learn,billy-inn/scikit-learn,vybstat/scikit-learn,cauchycui/scikit-learn,glennq/scikit-learn,tdhopper/scikit-learn,thilbern/scikit-learn,florian-f/sklearn,arjoly/scikit-learn,sgenoud/scikit-learn,hsiaoyi0504/scikit-learn,JPFrancoia/scikit-learn,deepesch/scikit-learn,jlegendary/scikit-learn,larsmans/scikit-learn,thientu/scikit-learn,kagayakidan/scikit-learn,B3AU/waveTree,potash/scikit-learn,arabenjamin/scikit-learn,devanshdalal/scikit-learn,jmschrei/scikit-learn,Achuth17/scikit-learn,JosmanPS/scikit-learn,Fireblend/scikit-learn,sumspr/scikit-learn,samuel1208/scikit-learn,aflaxman/scikit-learn,rahul-c1/scikit-learn,RachitKansal/scikit-learn,ssaeger/scikit-learn,xuewei4d/scikit-learn,clemkoa/scikit-learn,zuku1985/scikit-learn,LiaoPan/scikit-learn,DSLituiev/scikit-learn,dingocuster/scikit-learn,arahuja/scikit-learn,anurag313/scikit-learn,icdishb/scikit-learn,vermouthmjl/scikit-learn,OshynSong/scikit-learn,victorbergelin/scikit-learn,ahoyosid/scikit-learn,rsivapr/scikit-learn,djgagne/scikit-learn,anirudhjayaraman/scikit-learn,rvraghav93/scikit-learn,mojoboss/scikit-learn,cauchycui/scikit-learn,kaichogami/scikit-learn,larsmans/scikit-learn,kevin-intel/scikit-learn,YinongLong/scikit-learn,xzh86/scikit-learn,ndingwall/scikit-learn,Djabbz/scikit-learn,MartinDelzant/scikit-learn,nrhine1/scikit-learn,xiaoxiamii/scikit-learn,cauchycui/scikit-learn,tawsifkhan/scikit-learn,pratapvardhan/scikit-learn,anntzer/scikit-learn,ningchi/scikit-learn,roxyboy/scikit-learn,ilo10/scikit-learn,ashhher3/scikit-learn,thilbern/scikit-learn,pypot/scikit-learn,liyu1990/sklearn,fabianp/scikit-learn,manashmndl/scikit-learn,belltailjp/scikit-learn,yunfeilu/scikit-learn,TomDLT/scikit-learn,kjung/scikit-learn,henridwyer/scikit-learn,simon-pepin/scikit-learn,plissonf/scikit-learn,mrshu/scikit-learn,imaculate/scikit-learn,mhue/scikit-learn,untom/scikit-learn,wlamond/scikit-learn,fbagirov/scikit-learn,andaag/scikit-learn,russel1237/scikit-learn,depet/scikit-learn,3manuek/scikit-learn,davidgbe/scikit-learn,tmhm/scikit-learn,voxlol/scikit-learn,jlegendary/scikit-learn,Fireblend/scikit-learn,samzhang111/scikit-learn,madjelan/scikit-learn,mhdella/scikit-learn,jseabold/scikit-learn,trankmichael/scikit-learn,aabadie/scikit-learn,rajat1994/scikit-learn,scikit-learn/scikit-learn,spallavolu/scikit-learn,manhhomienbienthuy/scikit-learn,michigraber/scikit-learn,sinhrks/scikit-learn,kylerbrown/scikit-learn,ClimbsRocks/scikit-learn,arabenjamin/scikit-learn,fredhusser/scikit-learn,vinayak-mehta/scikit-learn,AIML/scikit-learn,heli522/scikit-learn,shangwuhencc/scikit-learn,TomDLT/scikit-learn,siutanwong/scikit-learn,Djabbz/scikit-learn,Nyker510/scikit-learn,lazywei/scikit-learn,michigraber/scikit-learn,espg/scikit-learn,Vimos/scikit-learn,shenzebang/scikit-learn,samuel1208/scikit-learn,Fireblend/scikit-learn,smartscheduling/scikit-learn-categorical-tree,nmayorov/scikit-learn,glouppe/scikit-learn,vinayak-mehta/scikit-learn,ashhher3/scikit-learn,pnedunuri/scikit-learn,DonBeo/scikit-learn,nhejazi/scikit-learn,heli522/scikit-learn,Achuth17/scikit-learn,sanketloke/scikit-learn,toastedcornflakes/scikit-learn,untom/scikit-learn,hitszxp/scikit-learn,mjgrav2001/scikit-learn,qifeigit/scikit-learn,fzalkow/scikit-learn,nesterione/scikit-learn,cdegroc/scikit-learn,shyamalschandra/scikit-learn,jmschrei/scikit-learn,Jimmy-Morzaria/scikit-learn,justincassidy/scikit-learn,joshloyal/scikit-learn,IssamLaradji/scikit-learn,treycausey/scikit-learn,fabianp/scikit-learn,jakobworldpeace/scikit-learn,JeanKossaifi/scikit-learn,Jimmy-Morzaria/scikit-learn,JPFrancoia/scikit-learn,NelisVerhoef/scikit-learn,adamgreenhall/scikit-learn,carrillo/scikit-learn,JeanKossaifi/scikit-learn,jzt5132/scikit-learn,costypetrisor/scikit-learn,schets/scikit-learn,shyamalschandra/scikit-learn,jaidevd/scikit-learn,zhenv5/scikit-learn,manashmndl/scikit-learn,robbymeals/scikit-learn,ivannz/scikit-learn,Srisai85/scikit-learn,hrjn/scikit-learn,fredhusser/scikit-learn,fyffyt/scikit-learn,AIML/scikit-learn,RomainBrault/scikit-learn,AlexanderFabisch/scikit-learn,kagayakidan/scikit-learn,CVML/scikit-learn,aminert/scikit-learn,fabianp/scikit-learn,Clyde-fare/scikit-learn,manhhomienbienthuy/scikit-learn,mxjl620/scikit-learn,Srisai85/scikit-learn,ominux/scikit-learn,zihua/scikit-learn,hugobowne/scikit-learn,ilo10/scikit-learn,smartscheduling/scikit-learn-categorical-tree,JosmanPS/scikit-learn,mattgiguere/scikit-learn,krez13/scikit-learn,nesterione/scikit-learn,cl4rke/scikit-learn,maheshakya/scikit-learn,fredhusser/scikit-learn,glemaitre/scikit-learn,waterponey/scikit-learn,IshankGulati/scikit-learn,ilo10/scikit-learn,ycaihua/scikit-learn,hitszxp/scikit-learn,smartscheduling/scikit-learn-categorical-tree,pratapvardhan/scikit-learn,shangwuhencc/scikit-learn,poryfly/scikit-learn,ky822/scikit-learn,0asa/scikit-learn,mblondel/scikit-learn,vivekmishra1991/scikit-learn,pianomania/scikit-learn,espg/scikit-learn,dsquareindia/scikit-learn,poryfly/scikit-learn,trungnt13/scikit-learn,justincassidy/scikit-learn,sarahgrogan/scikit-learn,florian-f/sklearn,TomDLT/scikit-learn,walterreade/scikit-learn,NunoEdgarGub1/scikit-learn,jakirkham/scikit-learn,voxlol/scikit-learn,anirudhjayaraman/scikit-learn,terkkila/scikit-learn,RomainBrault/scikit-learn,joshloyal/scikit-learn,beepee14/scikit-learn,idlead/scikit-learn,glemaitre/scikit-learn,vshtanko/scikit-learn,nelson-liu/scikit-learn,wlamond/scikit-learn,Aasmi/scikit-learn,jakobworldpeace/scikit-learn,aabadie/scikit-learn,belltailjp/scikit-learn,ngoix/OCRF,mattilyra/scikit-learn,arahuja/scikit-learn,rahuldhote/scikit-learn,jayflo/scikit-learn,samuel1208/scikit-learn,wzbozon/scikit-learn,simon-pepin/scikit-learn,thilbern/scikit-learn,glemaitre/scikit-learn,fabioticconi/scikit-learn,espg/scikit-learn,huzq/scikit-learn,AIML/scikit-learn,macks22/scikit-learn,ngoix/OCRF,Sentient07/scikit-learn,billy-inn/scikit-learn,lesteve/scikit-learn,xzh86/scikit-learn,0x0all/scikit-learn,mrshu/scikit-learn,jlegendary/scikit-learn,jayflo/scikit-learn,tomlof/scikit-learn,chrisburr/scikit-learn,nrhine1/scikit-learn,AnasGhrab/scikit-learn,belltailjp/scikit-learn,kmike/scikit-learn,aetilley/scikit-learn,Obus/scikit-learn,mxjl620/scikit-learn,evgchz/scikit-learn,shyamalschandra/scikit-learn,elkingtonmcb/scikit-learn,CforED/Machine-Learning,jjx02230808/project0223,IssamLaradji/scikit-learn,tosolveit/scikit-learn,andrewnc/scikit-learn,Myasuka/scikit-learn,deepesch/scikit-learn,Myasuka/scikit-learn,mikebenfield/scikit-learn,herilalaina/scikit-learn,AlexandreAbraham/scikit-learn,Barmaley-exe/scikit-learn,f3r/scikit-learn,ldirer/scikit-learn,dsullivan7/scikit-learn,zorroblue/scikit-learn,yask123/scikit-learn,lucidfrontier45/scikit-learn,hlin117/scikit-learn,justincassidy/scikit-learn,moutai/scikit-learn,pypot/scikit-learn,yanlend/scikit-learn,Clyde-fare/scikit-learn,mwv/scikit-learn,ZENGXH/scikit-learn,xwolf12/scikit-learn,murali-munna/scikit-learn,mlyundin/scikit-learn,rajat1994/scikit-learn,jblackburne/scikit-learn,Achuth17/scikit-learn,saiwing-yeung/scikit-learn,phdowling/scikit-learn,samuel1208/scikit-learn,devanshdalal/scikit-learn,yyjiang/scikit-learn,cybernet14/scikit-learn,TomDLT/scikit-learn,zuku1985/scikit-learn,mehdidc/scikit-learn,Adai0808/scikit-learn,joernhees/scikit-learn,hsiaoyi0504/scikit-learn,nmayorov/scikit-learn,murali-munna/scikit-learn,pythonvietnam/scikit-learn,ldirer/scikit-learn,victorbergelin/scikit-learn,sgenoud/scikit-learn,vigilv/scikit-learn,altairpearl/scikit-learn,mojoboss/scikit-learn,elkingtonmcb/scikit-learn,Obus/scikit-learn,rahul-c1/scikit-learn,AlexanderFabisch/scikit-learn,IndraVikas/scikit-learn,JeanKossaifi/scikit-learn,BiaDarkia/scikit-learn,walterreade/scikit-learn,huzq/scikit-learn,ycaihua/scikit-learn,cdegroc/scikit-learn,nelson-liu/scikit-learn,robbymeals/scikit-learn,pnedunuri/scikit-learn,r-mart/scikit-learn,arahuja/scikit-learn,maheshakya/scikit-learn,henridwyer/scikit-learn,Barmaley-exe/scikit-learn,IssamLaradji/scikit-learn,IshankGulati/scikit-learn,Sentient07/scikit-learn,rohanp/scikit-learn,theoryno3/scikit-learn,luo66/scikit-learn,AlexandreAbraham/scikit-learn,jayflo/scikit-learn,treycausey/scikit-learn,0x0all/scikit-learn,Achuth17/scikit-learn,siutanwong/scikit-learn,zorroblue/scikit-learn,LiaoPan/scikit-learn,depet/scikit-learn,yask123/scikit-learn,B3AU/waveTree,rrohan/scikit-learn,ankurankan/scikit-learn,JsNoNo/scikit-learn,NunoEdgarGub1/scikit-learn,alvarofierroclavero/scikit-learn,mugizico/scikit-learn,rohanp/scikit-learn,bnaul/scikit-learn,pypot/scikit-learn,kashif/scikit-learn,Adai0808/scikit-learn,cauchycui/scikit-learn,fredhusser/scikit-learn,kmike/scikit-learn,ningchi/scikit-learn,shusenl/scikit-learn,tawsifkhan/scikit-learn,mrshu/scikit-learn,ahoyosid/scikit-learn,Obus/scikit-learn,macks22/scikit-learn,ogrisel/scikit-learn,cwu2011/scikit-learn,UNR-AERIAL/scikit-learn,tosolveit/scikit-learn,mhue/scikit-learn,cainiaocome/scikit-learn,sgenoud/scikit-learn,MartinDelzant/scikit-learn,MartinSavc/scikit-learn,dsullivan7/scikit-learn,davidgbe/scikit-learn,DSLituiev/scikit-learn,loli/sklearn-ensembletrees,nmayorov/scikit-learn,fzalkow/scikit-learn,nvoron23/scikit-learn,nikitasingh981/scikit-learn,russel1237/scikit-learn,eg-zhang/scikit-learn,MatthieuBizien/scikit-learn,pompiduskus/scikit-learn,mehdidc/scikit-learn,IndraVikas/scikit-learn,ltiao/scikit-learn,clemkoa/scikit-learn,chrisburr/scikit-learn,mlyundin/scikit-learn,davidgbe/scikit-learn,etkirsch/scikit-learn,loli/semisupervisedforests,walterreade/scikit-learn,larsmans/scikit-learn,kashif/scikit-learn,RPGOne/scikit-learn,loli/semisupervisedforests,bthirion/scikit-learn,depet/scikit-learn,rahuldhote/scikit-learn,gotomypc/scikit-learn,466152112/scikit-learn,vermouthmjl/scikit-learn,pv/scikit-learn,yask123/scikit-learn,Adai0808/scikit-learn,mhdella/scikit-learn,Lawrence-Liu/scikit-learn,abhishekgahlot/scikit-learn,pkruskal/scikit-learn,arjoly/scikit-learn,henridwyer/scikit-learn,bhargav/scikit-learn,florian-f/sklearn,ishanic/scikit-learn,lazywei/scikit-learn,elkingtonmcb/scikit-learn,kylerbrown/scikit-learn,elkingtonmcb/scikit-learn,yyjiang/scikit-learn,bhargav/scikit-learn,kjung/scikit-learn,eg-zhang/scikit-learn,RPGOne/scikit-learn,betatim/scikit-learn,MechCoder/scikit-learn,ankurankan/scikit-learn,ChanderG/scikit-learn,zaxtax/scikit-learn,appapantula/scikit-learn,LohithBlaze/scikit-learn,florian-f/sklearn,ogrisel/scikit-learn,rahuldhote/scikit-learn,manashmndl/scikit-learn,pianomania/scikit-learn,mfjb/scikit-learn,schets/scikit-learn,zihua/scikit-learn,yyjiang/scikit-learn,PatrickOReilly/scikit-learn,scikit-learn/scikit-learn,Akshay0724/scikit-learn,bigdataelephants/scikit-learn,RayMick/scikit-learn,mxjl620/scikit-learn,shangwuhencc/scikit-learn,fbagirov/scikit-learn,icdishb/scikit-learn,CforED/Machine-Learning,mfjb/scikit-learn,HolgerPeters/scikit-learn,ephes/scikit-learn,PatrickOReilly/scikit-learn,Lawrence-Liu/scikit-learn,robin-lai/scikit-learn,jpautom/scikit-learn,aewhatley/scikit-learn,jm-begon/scikit-learn,MatthieuBizien/scikit-learn,Fireblend/scikit-learn,ZenDevelopmentSystems/scikit-learn,Srisai85/scikit-learn,cl4rke/scikit-learn,jorge2703/scikit-learn,hitszxp/scikit-learn,potash/scikit-learn,jblackburne/scikit-learn,andaag/scikit-learn,liangz0707/scikit-learn,phdowling/scikit-learn,mhdella/scikit-learn,rahul-c1/scikit-learn,h2educ/scikit-learn,ishanic/scikit-learn,rsivapr/scikit-learn,JsNoNo/scikit-learn,anurag313/scikit-learn,waterponey/scikit-learn,shenzebang/scikit-learn,heli522/scikit-learn,gclenaghan/scikit-learn,potash/scikit-learn,PatrickOReilly/scikit-learn,ElDeveloper/scikit-learn,kaichogami/scikit-learn,abimannans/scikit-learn,pnedunuri/scikit-learn,kashif/scikit-learn,ndingwall/scikit-learn,ishanic/scikit-learn,mattilyra/scikit-learn,AlexRobson/scikit-learn,treycausey/scikit-learn,AlexRobson/scikit-learn,bigdataelephants/scikit-le
arn,siutanwong/scikit-learn,evgchz/scikit-learn,alvarofierroclavero/scikit-learn,henrykironde/scikit-learn,anntzer/scikit-learn,imaculate/scikit-learn,petosegan/scikit-learn,eickenberg/scikit-learn,nhejazi/scikit-learn,fyffyt/scikit-learn,zhenv5/scikit-learn,giorgiop/scikit-learn,Garrett-R/scikit-learn,michigraber/scikit-learn,ClimbsRocks/scikit-learn,mehdidc/scikit-learn,terkkila/scikit-learn,zorojean/scikit-learn,bnaul/scikit-learn,kaichogami/scikit-learn,Aasmi/scikit-learn,eickenberg/scikit-learn,alexeyum/scikit-learn,themrmax/scikit-learn,ahoyosid/scikit-learn,JPFrancoia/scikit-learn,lbishal/scikit-learn,yyjiang/scikit-learn,qifeigit/scikit-learn,stylianos-kampakis/scikit-learn,robbymeals/scikit-learn,r-mart/scikit-learn,shikhardb/scikit-learn,0x0all/scikit-learn,fengzhyuan/scikit-learn,gclenaghan/scikit-learn,fyffyt/scikit-learn,wanggang3333/scikit-learn,Titan-C/scikit-learn,alexeyum/scikit-learn,sanketloke/scikit-learn,amueller/scikit-learn,vigilv/scikit-learn,RayMick/scikit-learn,siutanwong/scikit-learn,Titan-C/scikit-learn,gclenaghan/scikit-learn,jereze/scikit-learn,glouppe/scikit-learn,vibhorag/scikit-learn,hlin117/scikit-learn,trungnt13/scikit-learn,mikebenfield/scikit-learn,shikhardb/scikit-learn,Obus/scikit-learn,CVML/scikit-learn,mfjb/scikit-learn,jereze/scikit-learn,jjx02230808/project0223,chrsrds/scikit-learn,akionakamura/scikit-learn,hugobowne/scikit-learn,hitszxp/scikit-learn,adamgreenhall/scikit-learn,glemaitre/scikit-learn,kjung/scikit-learn,lbishal/scikit-learn,zorroblue/scikit-learn,Clyde-fare/scikit-learn,vermouthmjl/scikit-learn,Adai0808/scikit-learn,Akshay0724/scikit-learn,herilalaina/scikit-learn,anntzer/scikit-learn,shyamalschandra/scikit-learn,liangz0707/scikit-learn,anirudhjayaraman/scikit-learn,PatrickChrist/scikit-learn,vibhorag/scikit-learn,fyffyt/scikit-learn,MatthieuBizien/scikit-learn,AnasGhrab/scikit-learn,lesteve/scikit-learn,espg/scikit-learn,vortex-ape/scikit-learn,JosmanPS/scikit-learn,victorbergelin/scikit-learn,procoder317/scikit-learn,zihua/scikit-learn,djgagne/scikit-learn,rvraghav93/scikit-learn,MartinSavc/scikit-learn,xzh86/scikit-learn,sonnyhu/scikit-learn,xwolf12/scikit-learn,iismd17/scikit-learn,shikhardb/scikit-learn,theoryno3/scikit-learn,vivekmishra1991/scikit-learn,q1ang/scikit-learn,abimannans/scikit-learn,yonglehou/scikit-learn,MechCoder/scikit-learn,ilyes14/scikit-learn,themrmax/scikit-learn,meduz/scikit-learn,scikit-learn/scikit-learn,sergeyf/scikit-learn,chrsrds/scikit-learn,spallavolu/scikit-learn,aminert/scikit-learn,YinongLong/scikit-learn,andrewnc/scikit-learn,mikebenfield/scikit-learn,carrillo/scikit-learn,tawsifkhan/scikit-learn,mblondel/scikit-learn,ChanderG/scikit-learn,0asa/scikit-learn,B3AU/waveTree,iismd17/scikit-learn,ZENGXH/scikit-learn,raghavrv/scikit-learn,krez13/scikit-learn,0x0all/scikit-learn,MechCoder/scikit-learn,Barmaley-exe/scikit-learn,xyguo/scikit-learn,sumspr/scikit-learn,IndraVikas/scikit-learn,bnaul/scikit-learn,anirudhjayaraman/scikit-learn,hrjn/scikit-learn,lucidfrontier45/scikit-learn,xubenben/scikit-learn,aabadie/scikit-learn,fzalkow/scikit-learn,ephes/scikit-learn,ashhher3/scikit-learn,hdmetor/scikit-learn,appapantula/scikit-learn,PrashntS/scikit-learn,jlegendary/scikit-learn,nomadcube/scikit-learn,IssamLaradji/scikit-learn,sgenoud/scikit-learn,hugobowne/scikit-learn,jakobworldpeace/scikit-learn,xyguo/scikit-learn,mwv/scikit-learn,jakobworldpeace/scikit-learn,xavierwu/scikit-learn,tawsifkhan/scikit-learn,ChanderG/scikit-learn,lin-credible/scikit-learn,jzt5132/scikit-learn,samzhang111/scikit-learn,ky822/
scikit-learn,khkaminska/scikit-learn,henrykironde/scikit-learn,mjudsp/Tsallis,jakirkham/scikit-learn,tosolveit/scikit-learn,costypetrisor/scikit-learn,hainm/scikit-learn,tmhm/scikit-learn,ldirer/scikit-learn,nrhine1/scikit-learn,petosegan/scikit-learn,untom/scikit-learn,sanketloke/scikit-learn,abhishekgahlot/scikit-learn,aminert/scikit-learn,mayblue9/scikit-learn,iismd17/scikit-learn,NunoEdgarGub1/scikit-learn,rahul-c1/scikit-learn,RayMick/scikit-learn,arjoly/scikit-learn,sarahgrogan/scikit-learn,AIML/scikit-learn,PatrickOReilly/scikit-learn,sergeyf/scikit-learn,krez13/scikit-learn,xyguo/scikit-learn,arahuja/scikit-learn,f3r/scikit-learn,fbagirov/scikit-learn,IshankGulati/scikit-learn,tmhm/scikit-learn,zhenv5/scikit-learn,ilyes14/scikit-learn,fengzhyuan/scikit-learn,olologin/scikit-learn,nvoron23/scikit-learn,wzbozon/scikit-learn,hitszxp/scikit-learn,jereze/scikit-learn,madjelan/scikit-learn,heli522/scikit-learn,khkaminska/scikit-learn,etkirsch/scikit-learn,gotomypc/scikit-learn,MartinSavc/scikit-learn,abhishekgahlot/scikit-learn,kagayakidan/scikit-learn,lesteve/scikit-learn,lesteve/scikit-learn,altairpearl/scikit-learn,mayblue9/scikit-learn,MohammedWasim/scikit-learn,giorgiop/scikit-learn,ivannz/scikit-learn,deepesch/scikit-learn,bigdataelephants/scikit-learn,liberatorqjw/scikit-learn,cl4rke/scikit-learn,loli/sklearn-ensembletrees,jzt5132/scikit-learn,RachitKansal/scikit-learn,lazywei/scikit-learn,PrashntS/scikit-learn,billy-inn/scikit-learn,jaidevd/scikit-learn,Aasmi/scikit-learn,wanggang3333/scikit-learn,macks22/scikit-learn,vermouthmjl/scikit-learn,kmike/scikit-learn,f3r/scikit-learn,chrsrds/scikit-learn,ZENGXH/scikit-learn,macks22/scikit-learn,aetilley/scikit-learn,ngoix/OCRF,mjudsp/Tsallis,Myasuka/scikit-learn,trankmichael/scikit-learn,ElDeveloper/scikit-learn,luo66/scikit-learn,wanggang3333/scikit-learn,frank-tancf/scikit-learn,trungnt13/scikit-learn,cybernet14/scikit-learn,dsquareindia/scikit-learn,alexsavio/scikit-learn,wzbozon/scikit-learn,toastedcornflakes/scikit-learn,cybernet14/scikit-learn,beepee14/scikit-learn,lazywei/scikit-learn,adamgreenhall/scikit-learn,liberatorqjw/scikit-learn,wazeerzulfikar/scikit-learn,DonBeo/scikit-learn,jm-begon/scikit-learn,MohammedWasim/scikit-learn,glennq/scikit-learn,harshaneelhg/scikit-learn,LiaoPan/scikit-learn,equialgo/scikit-learn,djgagne/scikit-learn,jjx02230808/project0223,r-mart/scikit-learn,bthirion/scikit-learn,Barmaley-exe/scikit-learn,tdhopper/scikit-learn,pythonvietnam/scikit-learn,meduz/scikit-learn,olologin/scikit-learn,LohithBlaze/scikit-learn,ankurankan/scikit-learn,altairpearl/scikit-learn,hrjn/scikit-learn,evgchz/scikit-learn,0asa/scikit-learn,xavierwu/scikit-learn,shahankhatch/scikit-learn,JosmanPS/scikit-learn,nhejazi/scikit-learn,etkirsch/scikit-learn,abhishekgahlot/scikit-learn,alexsavio/scikit-learn,aewhatley/scikit-learn,anurag313/scikit-learn,lenovor/scikit-learn,loli/sklearn-ensembletrees,tdhopper/scikit-learn,OshynSong/scikit-learn,ltiao/scikit-learn,pkruskal/scikit-learn,quheng/scikit-learn,procoder317/scikit-learn,btabibian/scikit-learn,rajat1994/scikit-learn,kylerbrown/scikit-learn,hugobowne/scikit-learn,mugizico/scikit-learn,DSLituiev/scikit-learn,nhejazi/scikit-learn,dsquareindia/scikit-learn,PrashntS/scikit-learn,ChanChiChoi/scikit-learn,ephes/scikit-learn,alexsavio/scikit-learn,scikit-learn/scikit-learn,saiwing-yeung/scikit-learn,pianomania/scikit-learn,thientu/scikit-learn,vortex-ape/scikit-learn,hlin117/scikit-learn,q1ang/scikit-learn,tdhopper/scikit-learn,aewhatley/scikit-learn,kjung/scikit-learn,hrjn/scikit-
learn,mjudsp/Tsallis,jmetzen/scikit-learn,zorojean/scikit-learn,abimannans/scikit-learn,fzalkow/scikit-learn,PrashntS/scikit-learn,ivannz/scikit-learn,hsuantien/scikit-learn,stylianos-kampakis/scikit-learn,equialgo/scikit-learn,thilbern/scikit-learn,joernhees/scikit-learn,B3AU/waveTree,ZENGXH/scikit-learn,r-mart/scikit-learn,mblondel/scikit-learn,harshaneelhg/scikit-learn,pkruskal/scikit-learn,IshankGulati/scikit-learn,djgagne/scikit-learn,liangz0707/scikit-learn,eickenberg/scikit-learn,jaidevd/scikit-learn,pratapvardhan/scikit-learn,nomadcube/scikit-learn,alexeyum/scikit-learn,devanshdalal/scikit-learn,pv/scikit-learn,nikitasingh981/scikit-learn,fengzhyuan/scikit-learn,sumspr/scikit-learn,kylerbrown/scikit-learn,hainm/scikit-learn,zaxtax/scikit-learn,sonnyhu/scikit-learn,Lawrence-Liu/scikit-learn,vinayak-mehta/scikit-learn,saiwing-yeung/scikit-learn,maheshakya/scikit-learn,zhenv5/scikit-learn,etkirsch/scikit-learn,AlexandreAbraham/scikit-learn,UNR-AERIAL/scikit-learn,larsmans/scikit-learn,billy-inn/scikit-learn,aewhatley/scikit-learn,akionakamura/scikit-learn,Aasmi/scikit-learn,lin-credible/scikit-learn,mikebenfield/scikit-learn,rexshihaoren/scikit-learn,rishikksh20/scikit-learn,gotomypc/scikit-learn,rahuldhote/scikit-learn,marcocaccin/scikit-learn,frank-tancf/scikit-learn,nikitasingh981/scikit-learn,clemkoa/scikit-learn,vivekmishra1991/scikit-learn,ndingwall/scikit-learn,stylianos-kampakis/scikit-learn,glouppe/scikit-learn,theoryno3/scikit-learn,ngoix/OCRF,RayMick/scikit-learn,plissonf/scikit-learn,RPGOne/scikit-learn,ankurankan/scikit-learn,ZenDevelopmentSystems/scikit-learn,dhruv13J/scikit-learn,herilalaina/scikit-learn,lucidfrontier45/scikit-learn,zihua/scikit-learn,xiaoxiamii/scikit-learn,Clyde-fare/scikit-learn,MechCoder/scikit-learn,mblondel/scikit-learn,bnaul/scikit-learn,CforED/Machine-Learning,zuku1985/scikit-learn,abimannans/scikit-learn,roxyboy/scikit-learn,rajat1994/scikit-learn,wazeerzulfikar/scikit-learn,Djabbz/scikit-learn,roxyboy/scikit-learn,xwolf12/scikit-learn,dhruv13J/scikit-learn,CVML/scikit-learn,jseabold/scikit-learn,trankmichael/scikit-learn,wazeerzulfikar/scikit-learn,shusenl/scikit-learn,yanlend/scikit-learn,rsivapr/scikit-learn,robin-lai/scikit-learn,ClimbsRocks/scikit-learn,jkarnows/scikit-learn,fabianp/scikit-learn,nvoron23/scikit-learn,spallavolu/scikit-learn,0asa/scikit-learn,vshtanko/scikit-learn,samzhang111/scikit-learn,pratapvardhan/scikit-learn,jblackburne/scikit-learn,frank-tancf/scikit-learn,abhishekkrthakur/scikit-learn,jm-begon/scikit-learn,vibhorag/scikit-learn,robbymeals/scikit-learn,cdegroc/scikit-learn,betatim/scikit-learn,Garrett-R/scikit-learn,icdishb/scikit-learn,LohithBlaze/scikit-learn,michigraber/scikit-learn,vshtanko/scikit-learn,sumspr/scikit-learn,nelson-liu/scikit-learn,justincassidy/scikit-learn,mrshu/scikit-learn,pianomania/scikit-learn,aetilley/scikit-learn,massmutual/scikit-learn,dingocuster/scikit-learn,eg-zhang/scikit-learn,pkruskal/scikit-learn,imaculate/scikit-learn,themrmax/scikit-learn,poryfly/scikit-learn,jblackburne/scikit-learn,depet/scikit-learn,JsNoNo/scikit-learn,mrshu/scikit-learn,rishikksh20/scikit-learn,NelisVerhoef/scikit-learn,Vimos/scikit-learn,JeanKossaifi/scikit-learn,Vimos/scikit-learn,wlamond/scikit-learn,jmetzen/scikit-learn,procoder317/scikit-learn,carrillo/scikit-learn,petosegan/scikit-learn,466152112/scikit-learn,AnasGhrab/scikit-learn,DonBeo/scikit-learn,ky822/scikit-learn,huobaowangxi/scikit-learn,voxlol/scikit-learn,vigilv/scikit-learn,rexshihaoren/scikit-learn,ndingwall/scikit-learn,poryfly/scikit-learn
,shahankhatch/scikit-learn,lenovor/scikit-learn,eg-zhang/scikit-learn,rexshihaoren/scikit-learn,dsullivan7/scikit-learn,dsquareindia/scikit-learn,bigdataelephants/scikit-learn,loli/sklearn-ensembletrees,mojoboss/scikit-learn,terkkila/scikit-learn,glouppe/scikit-learn,jmschrei/scikit-learn,f3r/scikit-learn,theoryno3/scikit-learn,mattgiguere/scikit-learn,ssaeger/scikit-learn,rrohan/scikit-learn,fabioticconi/scikit-learn,ssaeger/scikit-learn,yask123/scikit-learn,thientu/scikit-learn,shusenl/scikit-learn,mjudsp/Tsallis,466152112/scikit-learn,voxlol/scikit-learn,AnasGhrab/scikit-learn,quheng/scikit-learn,xavierwu/scikit-learn,CforED/Machine-Learning,RachitKansal/scikit-learn,Djabbz/scikit-learn,wanggang3333/scikit-learn,xiaoxiamii/scikit-learn,amueller/scikit-learn,eickenberg/scikit-learn,kevin-intel/scikit-learn,shenzebang/scikit-learn,victorbergelin/scikit-learn,aflaxman/scikit-learn,jorik041/scikit-learn,q1ang/scikit-learn,toastedcornflakes/scikit-learn,arjoly/scikit-learn,fabioticconi/scikit-learn,quheng/scikit-learn,wzbozon/scikit-learn,quheng/scikit-learn,ogrisel/scikit-learn,equialgo/scikit-learn,davidgbe/scikit-learn,ldirer/scikit-learn,joernhees/scikit-learn,h2educ/scikit-learn,Windy-Ground/scikit-learn,HolgerPeters/scikit-learn,0x0all/scikit-learn,maheshakya/scikit-learn,ltiao/scikit-learn,Nyker510/scikit-learn,vortex-ape/scikit-learn,Windy-Ground/scikit-learn,cdegroc/scikit-learn,toastedcornflakes/scikit-learn,betatim/scikit-learn,gclenaghan/scikit-learn,sinhrks/scikit-learn,jzt5132/scikit-learn,PatrickChrist/scikit-learn,raghavrv/scikit-learn,RomainBrault/scikit-learn,vigilv/scikit-learn,pv/scikit-learn,luo66/scikit-learn,treycausey/scikit-learn,mugizico/scikit-learn,abhishekgahlot/scikit-learn,hsuantien/scikit-learn,wlamond/scikit-learn,arabenjamin/scikit-learn,giorgiop/scikit-learn,rvraghav93/scikit-learn,xuewei4d/scikit-learn,ogrisel/scikit-learn
import numpy as np import nose + from ..base import BaseEstimator from .. import cross_val + + class MockClassifier(BaseEstimator): + """Dummy classifier to test the cross-validation + + """ + def __init__(self, a=0): + self.a = a + + def fit(self, X, Y, **params): + self._set_params(**params) + return self + + def predict(self, T): + return T.shape[0] + + def score(self, X=None, Y=None): + return 1./(1+np.abs(self.a)) + + + X = np.ones((10, 2)) + y = np.arange(10)/2 + + ################################################################################ + # Tests def test_kfold(): # Check that errors are raise if there is not enough samples nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3) y = [0, 0, 1, 1, 2] nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3) + def test_cross_val_score(): + clf = MockClassifier() + for a in range(-10, 10): + clf.a = a + # Smoke test + score = cross_val.cross_val_score(clf, X, y) + np.testing.assert_array_equal(score, clf.score(X, y)) + +
Add a smoke test for cross_val_score
## Code Before: import numpy as np import nose from .. import cross_val def test_kfold(): # Check that errors are raise if there is not enough samples nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3) y = [0, 0, 1, 1, 2] nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3) ## Instruction: Add a smoke test for cross_val_score ## Code After: import numpy as np import nose from ..base import BaseEstimator from .. import cross_val class MockClassifier(BaseEstimator): """Dummy classifier to test the cross-validation """ def __init__(self, a=0): self.a = a def fit(self, X, Y, **params): self._set_params(**params) return self def predict(self, T): return T.shape[0] def score(self, X=None, Y=None): return 1./(1+np.abs(self.a)) X = np.ones((10, 2)) y = np.arange(10)/2 ################################################################################ # Tests def test_kfold(): # Check that errors are raise if there is not enough samples nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3) y = [0, 0, 1, 1, 2] nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3) def test_cross_val_score(): clf = MockClassifier() for a in range(-10, 10): clf.a = a # Smoke test score = cross_val.cross_val_score(clf, X, y) np.testing.assert_array_equal(score, clf.score(X, y))
import numpy as np import nose + from ..base import BaseEstimator from .. import cross_val + + class MockClassifier(BaseEstimator): + """Dummy classifier to test the cross-validation + + """ + def __init__(self, a=0): + self.a = a + + def fit(self, X, Y, **params): + self._set_params(**params) + return self + + def predict(self, T): + return T.shape[0] + + def score(self, X=None, Y=None): + return 1./(1+np.abs(self.a)) + + + X = np.ones((10, 2)) + y = np.arange(10)/2 + + ################################################################################ + # Tests def test_kfold(): # Check that errors are raise if there is not enough samples nose.tools.assert_raises(AssertionError, cross_val.KFold, 3, 3) y = [0, 0, 1, 1, 2] nose.tools.assert_raises(AssertionError, cross_val.StratifiedKFold, y, 3) + + def test_cross_val_score(): + clf = MockClassifier() + for a in range(-10, 10): + clf.a = a + # Smoke test + score = cross_val.cross_val_score(clf, X, y) + np.testing.assert_array_equal(score, clf.score(X, y)) +
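The MockClassifier pattern above generalizes: any cross-validation driver that only calls fit() and score() can be checked against an exact expected value by giving score() a closed form that ignores the data. A minimal standalone sketch of the same idea (plain Python, no scikit-learn imports; all names here are illustrative):

class ConstantScorer(object):
    """Mock estimator whose score is a known function of its parameter."""
    def __init__(self, a=0):
        self.a = a

    def fit(self, X, y):
        return self

    def score(self, X, y):
        return 1. / (1 + abs(self.a))

# Whatever folds a CV loop builds, fit()/score() on this object must yield
# exactly this value for every fold, which is what the smoke test asserts.
assert ConstantScorer(a=3).fit(None, None).score(None, None) == 0.25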
906a5ee2b6e20b09b12d36d61271cd63cac49418
py2pack/utils.py
py2pack/utils.py
from typing import List # noqa: F401, pylint: disable=unused-import import tarfile import zipfile def _get_archive_filelist(filename): # type: (str) -> List[str] names = [] # type: List[str] if tarfile.is_tarfile(filename): with tarfile.open(filename) as tar_file: names = sorted(tar_file.getnames()) elif zipfile.is_zipfile(filename): with zipfile.ZipFile(filename) as zip_file: names = sorted(zip_file.namelist()) else: raise Exception("Can not get filenames from '%s'. " "Not a tar or zip file" % filename) if "./" in names: names.remove("./") return names
from typing import List # noqa: F401, pylint: disable=unused-import import tarfile import zipfile def _get_archive_filelist(filename): # type: (str) -> List[str] names = [] # type: List[str] if tarfile.is_tarfile(filename): with tarfile.open(filename) as tar_file: names = sorted(tar_file.getnames()) elif zipfile.is_zipfile(filename): with zipfile.ZipFile(filename) as zip_file: names = sorted(zip_file.namelist()) else: raise ValueError("Can not get filenames from '{!s}'. " "Not a tar or zip file".format(filename)) if "./" in names: names.remove("./") return names
Raise a ValueError from _get_archive_filelist instead of Exception
Raise a ValueError from _get_archive_filelist instead of Exception Raising the Exception base class is considered bad style, as the more specialized child classes carry more information about the kind of error that occurred. And often no-one actually tries to catch the Exception class.
Python
apache-2.0
saschpe/py2pack
from typing import List # noqa: F401, pylint: disable=unused-import import tarfile import zipfile def _get_archive_filelist(filename): # type: (str) -> List[str] names = [] # type: List[str] if tarfile.is_tarfile(filename): with tarfile.open(filename) as tar_file: names = sorted(tar_file.getnames()) elif zipfile.is_zipfile(filename): with zipfile.ZipFile(filename) as zip_file: names = sorted(zip_file.namelist()) else: - raise Exception("Can not get filenames from '%s'. " + raise ValueError("Can not get filenames from '{!s}'. " - "Not a tar or zip file" % filename) + "Not a tar or zip file".format(filename)) if "./" in names: names.remove("./") return names
Raise a ValueError from _get_archive_filelist instead of Exception
## Code Before: from typing import List # noqa: F401, pylint: disable=unused-import import tarfile import zipfile def _get_archive_filelist(filename): # type: (str) -> List[str] names = [] # type: List[str] if tarfile.is_tarfile(filename): with tarfile.open(filename) as tar_file: names = sorted(tar_file.getnames()) elif zipfile.is_zipfile(filename): with zipfile.ZipFile(filename) as zip_file: names = sorted(zip_file.namelist()) else: raise Exception("Can not get filenames from '%s'. " "Not a tar or zip file" % filename) if "./" in names: names.remove("./") return names ## Instruction: Raise a ValueError from _get_archive_filelist instead of Exception ## Code After: from typing import List # noqa: F401, pylint: disable=unused-import import tarfile import zipfile def _get_archive_filelist(filename): # type: (str) -> List[str] names = [] # type: List[str] if tarfile.is_tarfile(filename): with tarfile.open(filename) as tar_file: names = sorted(tar_file.getnames()) elif zipfile.is_zipfile(filename): with zipfile.ZipFile(filename) as zip_file: names = sorted(zip_file.namelist()) else: raise ValueError("Can not get filenames from '{!s}'. " "Not a tar or zip file".format(filename)) if "./" in names: names.remove("./") return names
from typing import List # noqa: F401, pylint: disable=unused-import import tarfile import zipfile def _get_archive_filelist(filename): # type: (str) -> List[str] names = [] # type: List[str] if tarfile.is_tarfile(filename): with tarfile.open(filename) as tar_file: names = sorted(tar_file.getnames()) elif zipfile.is_zipfile(filename): with zipfile.ZipFile(filename) as zip_file: names = sorted(zip_file.namelist()) else: - raise Exception("Can not get filenames from '%s'. " ? ^^^^^^ ^ ^ + raise ValueError("Can not get filenames from '{!s}'. " ? +++++ ^^ ^ ^^ + - "Not a tar or zip file" % filename) ? ^^^ + "Not a tar or zip file".format(filename)) ? + ^^^^^^^^ + if "./" in names: names.remove("./") return names
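The practical payoff of the narrower exception shows up on the calling side: the unsupported-archive case can be handled specifically without a bare except that would also swallow real bugs. An illustrative sketch (the filename is hypothetical; assumes the function is imported from py2pack.utils as above):

from py2pack.utils import _get_archive_filelist

try:
    names = _get_archive_filelist("example.rpm")  # neither tar nor zip
except ValueError as err:
    # only the "not a tar or zip file" case lands here; unrelated failures
    # (IOError, TypeError, ...) still propagate instead of being masked
    print("skipping unsupported archive: {!s}".format(err))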
457f2daeb087ab06d7cb738cb69268bad29d11f4
examples/mhs_atmosphere/mhs_atmosphere_plot.py
examples/mhs_atmosphere/mhs_atmosphere_plot.py
import os import glob import yt model = 'spruit' datadir = os.path.expanduser('~/mhs_atmosphere/'+model+'/') files = glob.glob(datadir+'/*') files.sort() print(files) ds = yt.load(files[0]) slc = yt.SlicePlot(ds, normal='y', fields='density_bg') slc.save('~/yt.png')
import os import glob import yt model = 'spruit' datadir = os.path.expanduser('~/mhs_atmosphere/'+model+'/') files = glob.glob(datadir+'/*') files.sort() print(files) ds = yt.load(files[0]) # uncomment for axis swapping for normal='y' ds.coordinates.x_axis = {0: 2, 1: 0, 2: 1, 'x': 2, 'y': 0, 'z': 1} ds.coordinates.y_axis = {0: 1, 1: 2, 2: 0, 'x': 1, 'y': 2, 'z': 0} slc = yt.SlicePlot(ds, normal='y', fields='density_bg') slc.save('~/yt.png')
Add in axes swapping for normal='y'
Add in axes swapping for normal='y'
Python
bsd-2-clause
SWAT-Sheffield/pysac,Cadair/pysac
import os import glob import yt model = 'spruit' datadir = os.path.expanduser('~/mhs_atmosphere/'+model+'/') files = glob.glob(datadir+'/*') files.sort() print(files) ds = yt.load(files[0]) + # uncomment for axis swapping for normal='y' + ds.coordinates.x_axis = {0: 2, 1: 0, 2: 1, 'x': 2, 'y': 0, 'z': 1} + ds.coordinates.y_axis = {0: 1, 1: 2, 2: 0, 'x': 1, 'y': 2, 'z': 0} + slc = yt.SlicePlot(ds, normal='y', fields='density_bg') slc.save('~/yt.png')
Add in axes swapping for normal='y'
## Code Before: import os import glob import yt model = 'spruit' datadir = os.path.expanduser('~/mhs_atmosphere/'+model+'/') files = glob.glob(datadir+'/*') files.sort() print(files) ds = yt.load(files[0]) slc = yt.SlicePlot(ds, normal='y', fields='density_bg') slc.save('~/yt.png') ## Instruction: Add in axes swapping for normal='y' ## Code After: import os import glob import yt model = 'spruit' datadir = os.path.expanduser('~/mhs_atmosphere/'+model+'/') files = glob.glob(datadir+'/*') files.sort() print(files) ds = yt.load(files[0]) # uncomment for axis swapping for normal='y' ds.coordinates.x_axis = {0: 2, 1: 0, 2: 1, 'x': 2, 'y': 0, 'z': 1} ds.coordinates.y_axis = {0: 1, 1: 2, 2: 0, 'x': 1, 'y': 2, 'z': 0} slc = yt.SlicePlot(ds, normal='y', fields='density_bg') slc.save('~/yt.png')
import os import glob import yt model = 'spruit' datadir = os.path.expanduser('~/mhs_atmosphere/'+model+'/') files = glob.glob(datadir+'/*') files.sort() print(files) ds = yt.load(files[0]) + # uncomment for axis swapping for normal='y' + ds.coordinates.x_axis = {0: 2, 1: 0, 2: 1, 'x': 2, 'y': 0, 'z': 1} + ds.coordinates.y_axis = {0: 1, 1: 2, 2: 0, 'x': 1, 'y': 2, 'z': 0} + slc = yt.SlicePlot(ds, normal='y', fields='density_bg') slc.save('~/yt.png')
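What the two override dicts encode (read here outside yt, as an assumption from the commit): the keys are the slice normal and the values are the index of the data axis drawn on the plot's horizontal (x_axis) and vertical (y_axis) axes, with 0/1/2 standing for x/y/z. For normal='y' this puts data-x on the horizontal and data-z on the vertical, swapping yt's default orientation:

x_axis = {'x': 2, 'y': 0, 'z': 1}  # plot-horizontal data axis per normal
y_axis = {'x': 1, 'y': 2, 'z': 0}  # plot-vertical data axis per normal

axis_names = 'xyz'
normal = 'y'
print(axis_names[x_axis[normal]], axis_names[y_axis[normal]])  # -> x z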
bdcf90a0fdf782b1c6cfd261e0dbb208e013eb1b
python/day12.py
python/day12.py
import json import pathlib input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt') def sum_data(d): total = 0 if isinstance(d, dict): d = d.values() for item in d: if isinstance(item, int): total += item elif isinstance(item, (list, dict)): total += sum_data(item) else: continue # Some other type we’re not interested in return total def sum_json(raw_json): parsed = json.loads(raw_json) return sum_data(parsed) def test_simple(): assert sum_json('[1,2,3]') == 6 assert sum_json('{"a":2,"b":4}') == 6 def test_nested(): assert sum_json('[[[3]]]') == 3 assert sum_json('{"a":{"b":4},"c":-1}') == 3 def test_mixed(): assert sum_json('{"a":[-1,1]}') == 0 assert sum_json('[-1,{"a":1}]') == 0 def test_empty(): assert sum_json('[]') == 0 assert sum_json('{}') == 0 if __name__ == '__main__': with open(input_file) as json_file: json_data = json_file.read() print(sum_json(json_data))
import json import pathlib input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt') def sum_data(d): total = 0 if isinstance(d, dict): d = d.values() if 'red' in d: return 0 for item in d: if isinstance(item, int): total += item elif isinstance(item, (list, dict)): total += sum_data(item) else: continue # Some other type we’re not interested in return total def sum_json(raw_json): parsed = json.loads(raw_json) return sum_data(parsed) def test_simple(): assert sum_json('[1,2,3]') == 6 assert sum_json('{"a":2,"b":4}') == 6 def test_nested(): assert sum_json('[[[3]]]') == 3 assert sum_json('{"a":{"b":4},"c":-1}') == 3 def test_mixed(): assert sum_json('{"a":[-1,1]}') == 0 assert sum_json('[-1,{"a":1}]') == 0 def test_empty(): assert sum_json('[]') == 0 assert sum_json('{}') == 0 if __name__ == '__main__': with open(input_file) as json_file: json_data = json_file.read() print(sum_json(json_data))
Add day 12 part two solution in python
Add day 12 part two solution in python
Python
mit
robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions
import json import pathlib input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt') def sum_data(d): total = 0 if isinstance(d, dict): d = d.values() + if 'red' in d: return 0 for item in d: if isinstance(item, int): total += item elif isinstance(item, (list, dict)): total += sum_data(item) else: continue # Some other type we’re not interested in return total def sum_json(raw_json): parsed = json.loads(raw_json) return sum_data(parsed) def test_simple(): assert sum_json('[1,2,3]') == 6 assert sum_json('{"a":2,"b":4}') == 6 def test_nested(): assert sum_json('[[[3]]]') == 3 assert sum_json('{"a":{"b":4},"c":-1}') == 3 def test_mixed(): assert sum_json('{"a":[-1,1]}') == 0 assert sum_json('[-1,{"a":1}]') == 0 def test_empty(): assert sum_json('[]') == 0 assert sum_json('{}') == 0 if __name__ == '__main__': with open(input_file) as json_file: json_data = json_file.read() print(sum_json(json_data))
Add day 12 part two solution in python
## Code Before: import json import pathlib input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt') def sum_data(d): total = 0 if isinstance(d, dict): d = d.values() for item in d: if isinstance(item, int): total += item elif isinstance(item, (list, dict)): total += sum_data(item) else: continue # Some other type we’re not interested in return total def sum_json(raw_json): parsed = json.loads(raw_json) return sum_data(parsed) def test_simple(): assert sum_json('[1,2,3]') == 6 assert sum_json('{"a":2,"b":4}') == 6 def test_nested(): assert sum_json('[[[3]]]') == 3 assert sum_json('{"a":{"b":4},"c":-1}') == 3 def test_mixed(): assert sum_json('{"a":[-1,1]}') == 0 assert sum_json('[-1,{"a":1}]') == 0 def test_empty(): assert sum_json('[]') == 0 assert sum_json('{}') == 0 if __name__ == '__main__': with open(input_file) as json_file: json_data = json_file.read() print(sum_json(json_data)) ## Instruction: Add day 12 part two solution in python ## Code After: import json import pathlib input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt') def sum_data(d): total = 0 if isinstance(d, dict): d = d.values() if 'red' in d: return 0 for item in d: if isinstance(item, int): total += item elif isinstance(item, (list, dict)): total += sum_data(item) else: continue # Some other type we’re not interested in return total def sum_json(raw_json): parsed = json.loads(raw_json) return sum_data(parsed) def test_simple(): assert sum_json('[1,2,3]') == 6 assert sum_json('{"a":2,"b":4}') == 6 def test_nested(): assert sum_json('[[[3]]]') == 3 assert sum_json('{"a":{"b":4},"c":-1}') == 3 def test_mixed(): assert sum_json('{"a":[-1,1]}') == 0 assert sum_json('[-1,{"a":1}]') == 0 def test_empty(): assert sum_json('[]') == 0 assert sum_json('{}') == 0 if __name__ == '__main__': with open(input_file) as json_file: json_data = json_file.read() print(sum_json(json_data))
import json import pathlib input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt') def sum_data(d): total = 0 if isinstance(d, dict): d = d.values() + if 'red' in d: return 0 for item in d: if isinstance(item, int): total += item elif isinstance(item, (list, dict)): total += sum_data(item) else: continue # Some other type we’re not interested in return total def sum_json(raw_json): parsed = json.loads(raw_json) return sum_data(parsed) def test_simple(): assert sum_json('[1,2,3]') == 6 assert sum_json('{"a":2,"b":4}') == 6 def test_nested(): assert sum_json('[[[3]]]') == 3 assert sum_json('{"a":{"b":4},"c":-1}') == 3 def test_mixed(): assert sum_json('{"a":[-1,1]}') == 0 assert sum_json('[-1,{"a":1}]') == 0 def test_empty(): assert sum_json('[]') == 0 assert sum_json('{}') == 0 if __name__ == '__main__': with open(input_file) as json_file: json_data = json_file.read() print(sum_json(json_data))
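The three part-two examples from the puzzle statement make good regression checks for the new rule, and all of them pass with the code above: an object containing the value "red" contributes nothing, while "red" inside an array changes nothing.

def test_red_objects_ignored():
    assert sum_json('[1,{"c":"red","b":2},3]') == 4          # inner object zeroed
    assert sum_json('{"d":"red","e":[1,2,3,4],"f":5}') == 0  # whole object zeroed
    assert sum_json('[1,"red",5]') == 6                      # "red" in an array is fine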
75615b2328e521b6bb37321d1cd7dc75c4d3bfef
hecate/core/topology/border.py
hecate/core/topology/border.py
from hecate.core.topology.mixins import DimensionsMixin class Border(DimensionsMixin): """ Base class for all types of borders. """ def __init__(self): self.topology = None class TorusBorder(Border): supported_dimensions = list(range(1, 100)) def wrap_coords(self, coord_prefix): code = "" for i in range(self.dimensions): code += "{x}{i} %= {w}{i};\n".format( x=coord_prefix, i=i, w=self.topology.lattice.width_prefix ) return code
from hecate.core.topology.mixins import DimensionsMixin class Border(DimensionsMixin): """ Base class for all types of borders. """ def __init__(self): self.topology = None class TorusBorder(Border): supported_dimensions = list(range(1, 100)) def wrap_coords(self, coord_prefix): code = "" for i in range(self.dimensions): code += "{x}{i} = ({x}{i} + {w}{i}) % {w}{i};\n".format( x=coord_prefix, i=i, w=self.topology.lattice.width_prefix ) return code
Fix incorrect TorusBorder wrapping in negative direction
Fix incorrect TorusBorder wrapping in negative direction
Python
mit
a5kin/hecate,a5kin/hecate
from hecate.core.topology.mixins import DimensionsMixin class Border(DimensionsMixin): """ Base class for all types of borders. """ def __init__(self): self.topology = None class TorusBorder(Border): supported_dimensions = list(range(1, 100)) def wrap_coords(self, coord_prefix): code = "" for i in range(self.dimensions): - code += "{x}{i} %= {w}{i};\n".format( + code += "{x}{i} = ({x}{i} + {w}{i}) % {w}{i};\n".format( x=coord_prefix, i=i, w=self.topology.lattice.width_prefix ) return code
Fix incorrect TorusBorder wrapping in negative direction
## Code Before: from hecate.core.topology.mixins import DimensionsMixin class Border(DimensionsMixin): """ Base class for all types of borders. """ def __init__(self): self.topology = None class TorusBorder(Border): supported_dimensions = list(range(1, 100)) def wrap_coords(self, coord_prefix): code = "" for i in range(self.dimensions): code += "{x}{i} %= {w}{i};\n".format( x=coord_prefix, i=i, w=self.topology.lattice.width_prefix ) return code ## Instruction: Fix incorrect TorusBorder wrapping in negative direction ## Code After: from hecate.core.topology.mixins import DimensionsMixin class Border(DimensionsMixin): """ Base class for all types of borders. """ def __init__(self): self.topology = None class TorusBorder(Border): supported_dimensions = list(range(1, 100)) def wrap_coords(self, coord_prefix): code = "" for i in range(self.dimensions): code += "{x}{i} = ({x}{i} + {w}{i}) % {w}{i};\n".format( x=coord_prefix, i=i, w=self.topology.lattice.width_prefix ) return code
from hecate.core.topology.mixins import DimensionsMixin class Border(DimensionsMixin): """ Base class for all types of borders. """ def __init__(self): self.topology = None class TorusBorder(Border): supported_dimensions = list(range(1, 100)) def wrap_coords(self, coord_prefix): code = "" for i in range(self.dimensions): - code += "{x}{i} %= {w}{i};\n".format( ? - + code += "{x}{i} = ({x}{i} + {w}{i}) % {w}{i};\n".format( ? ++++++++++++++++++++ x=coord_prefix, i=i, w=self.topology.lattice.width_prefix ) return code
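Why the simple `%=` was wrong: wrap_coords emits C-style source for the GPU kernels (an assumption based on the emitted syntax), and C's `%` truncates toward zero, so a coordinate one step past the lower edge stays negative (-1 % 10 == -1). Adding the width first keeps the dividend non-negative for any offset down to -w. A small Python sketch emulating C's modulo:

def c_mod(a, w):
    # C-style truncated modulo (using Python 3 true division): the result
    # takes the sign of the dividend, unlike Python's own floor-style "%"
    return a - int(a / w) * w

x, w = -1, 10
print(c_mod(x, w))      # -1 : the old "x %= w" left this coordinate unwrapped
print(c_mod(x + w, w))  # 9  : the fixed "(x + w) % w" wraps it correctly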
e632fa3e12d3627abaf26f41a9f0483aaea24adf
imager/ImagerProfile/tests.py
imager/ImagerProfile/tests.py
from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.ImagerProfile' django_get_or_create = ('username',) username = 'John'
from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.User' django_get_or_create = ('username',) username = factory.Sequence(lambda n: "Agent %03d" % n)
Change test UserFactory model to point to User
Change test UserFactory model to point to User
Python
mit
nbeck90/django-imager,nbeck90/django-imager
from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: - model = 'imagerprofile.ImagerProfile' + model = 'imagerprofile.User' django_get_or_create = ('username',) - username = 'John' + username = factory.Sequence(lambda n: "Agent %03d" % n)
Change test UserFactory model to point to User
## Code Before: from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.ImagerProfile' django_get_or_create = ('username',) username = 'John' ## Instruction: Change test UserFactory model to point to User ## Code After: from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.User' django_get_or_create = ('username',) username = factory.Sequence(lambda n: "Agent %03d" % n)
from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: - model = 'imagerprofile.ImagerProfile' ? ^^^^ ------- + model = 'imagerprofile.User' ? ^^ django_get_or_create = ('username',) - username = 'John' + username = factory.Sequence(lambda n: "Agent %03d" % n)
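The point of factory.Sequence is that every build receives a fresh counter value, so the django_get_or_create lookup on username stops matching the same 'John' row on each call. A standalone sketch with a plain class (no Django required; the counter's starting value depends on the factory_boy version):

import factory

class Plain(object):
    def __init__(self, username):
        self.username = username

class PlainFactory(factory.Factory):
    class Meta:
        model = Plain

    username = factory.Sequence(lambda n: "Agent %03d" % n)

print([PlainFactory().username for _ in range(3)])
# e.g. ['Agent 000', 'Agent 001', 'Agent 002']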
1ee41f5439f80af139e612591d48cdac5ecfda39
hiapi/hi.py
hiapi/hi.py
import argparse from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hi!\n' def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-b', '--bind-address', dest='bind', default='127.0.0.1') parser.add_argument('-p', '--port', dest='port', default=4000, type=int) return parser.parse_args() def main(): opts = parse_args() app.run(host=opts.bind, port=opts.port) # Support for uWSGI def application(env, start_response): start_response('200 OK', [('Content-Type', 'text/plain')]) return [b'Hi!\n'] if __name__ == "__main__": main()
import argparse import flask RESPONSE_CODE = 200 app = flask.Flask(__name__) @app.route('/') def hello(): global RESPONSE_CODE if RESPONSE_CODE == 200: return 'Hi!\n' else: flask.abort(RESPONSE_CODE) def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-b', '--bind-address', dest='bind', default='127.0.0.1') parser.add_argument('-p', '--port', dest='port', default=4000, type=int) parser.add_argument('-c', '--response_code', dest='code', default=200, type=int) return parser.parse_args() def main(): global RESPONSE_CODE opts = parse_args() RESPONSE_CODE = opts.code app.run(host=opts.bind, port=opts.port) if __name__ == "__main__": main()
Remove uwsgi support, add support for simple alternative responses
Remove uwsgi support, add support for simple alternative responses
Python
apache-2.0
GradysGhost/pyhiapi
import argparse - from flask import Flask + import flask + RESPONSE_CODE = 200 + - app = Flask(__name__) + app = flask.Flask(__name__) @app.route('/') def hello(): + global RESPONSE_CODE + if RESPONSE_CODE == 200: - return 'Hi!\n' + return 'Hi!\n' + else: + flask.abort(RESPONSE_CODE) def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-b', '--bind-address', dest='bind', default='127.0.0.1') parser.add_argument('-p', '--port', dest='port', default=4000, type=int) + parser.add_argument('-c', '--response_code', dest='code', default=200, type=int) return parser.parse_args() def main(): + global RESPONSE_CODE opts = parse_args() + RESPONSE_CODE = opts.code app.run(host=opts.bind, port=opts.port) - - # Support for uWSGI - def application(env, start_response): - start_response('200 OK', [('Content-Type', 'text/plain')]) - return [b'Hi!\n'] if __name__ == "__main__": main()
Remove uwsgi support, add support for simple alternative responses
## Code Before: import argparse from flask import Flask app = Flask(__name__) @app.route('/') def hello(): return 'Hi!\n' def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-b', '--bind-address', dest='bind', default='127.0.0.1') parser.add_argument('-p', '--port', dest='port', default=4000, type=int) return parser.parse_args() def main(): opts = parse_args() app.run(host=opts.bind, port=opts.port) # Support for uWSGI def application(env, start_response): start_response('200 OK', [('Content-Type', 'text/plain')]) return [b'Hi!\n'] if __name__ == "__main__": main() ## Instruction: Remove uwsgi support, add support for simple alternative responses ## Code After: import argparse import flask RESPONSE_CODE = 200 app = flask.Flask(__name__) @app.route('/') def hello(): global RESPONSE_CODE if RESPONSE_CODE == 200: return 'Hi!\n' else: flask.abort(RESPONSE_CODE) def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-b', '--bind-address', dest='bind', default='127.0.0.1') parser.add_argument('-p', '--port', dest='port', default=4000, type=int) parser.add_argument('-c', '--response_code', dest='code', default=200, type=int) return parser.parse_args() def main(): global RESPONSE_CODE opts = parse_args() RESPONSE_CODE = opts.code app.run(host=opts.bind, port=opts.port) if __name__ == "__main__": main()
import argparse - from flask import Flask + import flask + RESPONSE_CODE = 200 + - app = Flask(__name__) + app = flask.Flask(__name__) ? ++++++ @app.route('/') def hello(): + global RESPONSE_CODE + if RESPONSE_CODE == 200: - return 'Hi!\n' + return 'Hi!\n' ? ++++ + else: + flask.abort(RESPONSE_CODE) def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('-b', '--bind-address', dest='bind', default='127.0.0.1') parser.add_argument('-p', '--port', dest='port', default=4000, type=int) + parser.add_argument('-c', '--response_code', dest='code', default=200, type=int) return parser.parse_args() def main(): + global RESPONSE_CODE opts = parse_args() + RESPONSE_CODE = opts.code app.run(host=opts.bind, port=opts.port) - - # Support for uWSGI - def application(env, start_response): - start_response('200 OK', [('Content-Type', 'text/plain')]) - return [b'Hi!\n'] if __name__ == "__main__": main()
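A quick way to exercise the new knob without binding a port is Flask's built-in test client (the import path is assumed from the file name hiapi/hi.py):

from hiapi import hi

hi.RESPONSE_CODE = 503          # simulate an unhealthy service
with hi.app.test_client() as client:
    assert client.get('/').status_code == 503

hi.RESPONSE_CODE = 200          # healthy again
with hi.app.test_client() as client:
    assert client.get('/').data == b'Hi!\n'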
ca32941b82f1c723465a480355242b37ca19848c
setup.py
setup.py
from datetime import datetime from distutils.core import setup import os import subprocess if os.path.exists("MANIFEST"): os.unlink("MANIFEST") VERSION = ("11", "06", "0", "alpha", "0") setup( name='armstrong', version=".".join(VERSION), description="Armstrong is an open-source publishing system designed for news organizations that gives your team the technology edge it needs to report in a media-rich environment.", long_description=open("README.rst").read(), author='Bay Citizen & Texas Tribune', author_email='[email protected]', url='http://github.com/armstrongcms/armstrong/', packages=["armstrong", ], namespace_packages=["armstrong", ], install_requires=[ "armstrong.core.arm_content", "armstrong.core.arm_sections", "armstrong.core.arm_wells", "armstrong.apps.articles", "armstrong.apps.content", "armstrong.apps.events", ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
from datetime import datetime from distutils.core import setup import os import subprocess if os.path.exists("MANIFEST"): os.unlink("MANIFEST") VERSION = ("11", "06", "0", "alpha", "0") setup( name='armstrong', version=".".join(VERSION), description="Armstrong is an open-source publishing system designed for news organizations that gives your team the technology edge it needs to report in a media-rich environment.", long_description=open("README.rst").read(), author='Bay Citizen & Texas Tribune', author_email='[email protected]', url='http://github.com/armstrongcms/armstrong/', packages=["armstrong", ], namespace_packages=["armstrong", ], install_requires=[ "armstrong.cli", "armstrong.core.arm_content", "armstrong.core.arm_sections", "armstrong.core.arm_wells", "armstrong.apps.articles", "armstrong.apps.content", "armstrong.apps.events", ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
Add armstrong.cli to the mix
Add armstrong.cli to the mix
Python
apache-2.0
armstrong/armstrong
from datetime import datetime from distutils.core import setup import os import subprocess if os.path.exists("MANIFEST"): os.unlink("MANIFEST") VERSION = ("11", "06", "0", "alpha", "0") setup( name='armstrong', version=".".join(VERSION), description="Armstrong is an open-source publishing system designed for news organizations that gives your team the technology edge it needs to report in a media-rich environment.", long_description=open("README.rst").read(), author='Bay Citizen & Texas Tribune', author_email='[email protected]', url='http://github.com/armstrongcms/armstrong/', packages=["armstrong", ], namespace_packages=["armstrong", ], install_requires=[ + "armstrong.cli", "armstrong.core.arm_content", "armstrong.core.arm_sections", "armstrong.core.arm_wells", "armstrong.apps.articles", "armstrong.apps.content", "armstrong.apps.events", ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
Add armstrong.cli to the mix
## Code Before: from datetime import datetime from distutils.core import setup import os import subprocess if os.path.exists("MANIFEST"): os.unlink("MANIFEST") VERSION = ("11", "06", "0", "alpha", "0") setup( name='armstrong', version=".".join(VERSION), description="Armstrong is an open-source publishing system designed for news organizations that gives your team the technology edge it needs to report in a media-rich environment.", long_description=open("README.rst").read(), author='Bay Citizen & Texas Tribune', author_email='[email protected]', url='http://github.com/armstrongcms/armstrong/', packages=["armstrong", ], namespace_packages=["armstrong", ], install_requires=[ "armstrong.core.arm_content", "armstrong.core.arm_sections", "armstrong.core.arm_wells", "armstrong.apps.articles", "armstrong.apps.content", "armstrong.apps.events", ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], ) ## Instruction: Add armstrong.cli to the mix ## Code After: from datetime import datetime from distutils.core import setup import os import subprocess if os.path.exists("MANIFEST"): os.unlink("MANIFEST") VERSION = ("11", "06", "0", "alpha", "0") setup( name='armstrong', version=".".join(VERSION), description="Armstrong is an open-source publishing system designed for news organizations that gives your team the technology edge it needs to report in a media-rich environment.", long_description=open("README.rst").read(), author='Bay Citizen & Texas Tribune', author_email='[email protected]', url='http://github.com/armstrongcms/armstrong/', packages=["armstrong", ], namespace_packages=["armstrong", ], install_requires=[ "armstrong.cli", "armstrong.core.arm_content", "armstrong.core.arm_sections", "armstrong.core.arm_wells", "armstrong.apps.articles", "armstrong.apps.content", "armstrong.apps.events", ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
from datetime import datetime from distutils.core import setup import os import subprocess if os.path.exists("MANIFEST"): os.unlink("MANIFEST") VERSION = ("11", "06", "0", "alpha", "0") setup( name='armstrong', version=".".join(VERSION), description="Armstrong is an open-source publishing system designed for news organizations that gives your team the technology edge it needs to report in a media-rich environment.", long_description=open("README.rst").read(), author='Bay Citizen & Texas Tribune', author_email='[email protected]', url='http://github.com/armstrongcms/armstrong/', packages=["armstrong", ], namespace_packages=["armstrong", ], install_requires=[ + "armstrong.cli", "armstrong.core.arm_content", "armstrong.core.arm_sections", "armstrong.core.arm_wells", "armstrong.apps.articles", "armstrong.apps.content", "armstrong.apps.events", ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', ], )
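The reason separate distributions can all be required under the one top-level name is the namespace_packages declaration: each armstrong.* distribution is expected to ship an armstrong/__init__.py along these lines (the setuptools-era convention, shown for illustration rather than taken from the armstrong sources):

# armstrong/__init__.py in every armstrong.* distribution
__import__('pkg_resources').declare_namespace(__name__)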
ae78bd758c690e28abaae2c07e8a3890e76044e0
pylearn2/scripts/papers/maxout/tests/test_mnist.py
pylearn2/scripts/papers/maxout/tests/test_mnist.py
import os import numpy as np import pylearn2 from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix from pylearn2.termination_criteria import EpochCounter from pylearn2.utils.serial import load_train_file def test_mnist(): """ Test the mnist.yaml file from the dropout paper on random input """ train = load_train_file(os.path.join(pylearn2.__path__[0], "scripts/papers/maxout/mnist.yaml")) random_X = np.random.rand(10, 784) random_y = np.random.randint(0, 10, (10, 1)) train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10) train.algorithm.termination_criterion = EpochCounter(max_epochs=1) train.main_loop()
import os import numpy as np import pylearn2 from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix from pylearn2.termination_criteria import EpochCounter from pylearn2.utils.serial import load_train_file def test_mnist(): """ Test the mnist.yaml file from the dropout paper on random input """ train = load_train_file(os.path.join(pylearn2.__path__[0], "scripts/papers/maxout/mnist.yaml")) random_X = np.random.rand(10, 784) random_y = np.random.randint(0, 10, (10, 1)) train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10) train.algorithm.termination_criterion = EpochCounter(max_epochs=1) train.algorithm._set_monitoring_dataset(train.dataset) train.main_loop()
Allow papers/maxout to be tested without MNIST data
Allow papers/maxout to be tested without MNIST data
Python
bsd-3-clause
KennethPierce/pylearnk,KennethPierce/pylearnk,Refefer/pylearn2,JesseLivezey/plankton,goodfeli/pylearn2,theoryno3/pylearn2,alexjc/pylearn2,pkainz/pylearn2,fulmicoton/pylearn2,alexjc/pylearn2,alexjc/pylearn2,kastnerkyle/pylearn2,ddboline/pylearn2,se4u/pylearn2,hantek/pylearn2,jeremyfix/pylearn2,nouiz/pylearn2,abergeron/pylearn2,sandeepkbhat/pylearn2,chrish42/pylearn,msingh172/pylearn2,caidongyun/pylearn2,fulmicoton/pylearn2,Refefer/pylearn2,skearnes/pylearn2,sandeepkbhat/pylearn2,mclaughlin6464/pylearn2,hantek/pylearn2,sandeepkbhat/pylearn2,fyffyt/pylearn2,bartvm/pylearn2,fulmicoton/pylearn2,jamessergeant/pylearn2,se4u/pylearn2,mkraemer67/pylearn2,TNick/pylearn2,junbochen/pylearn2,sandeepkbhat/pylearn2,hyqneuron/pylearn2-maxsom,woozzu/pylearn2,CIFASIS/pylearn2,pombredanne/pylearn2,fishcorn/pylearn2,JesseLivezey/pylearn2,ashhher3/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,lisa-lab/pylearn2,pombredanne/pylearn2,daemonmaker/pylearn2,ddboline/pylearn2,junbochen/pylearn2,JesseLivezey/plankton,hantek/pylearn2,msingh172/pylearn2,ashhher3/pylearn2,ddboline/pylearn2,matrogers/pylearn2,abergeron/pylearn2,ashhher3/pylearn2,shiquanwang/pylearn2,goodfeli/pylearn2,pombredanne/pylearn2,nouiz/pylearn2,nouiz/pylearn2,matrogers/pylearn2,cosmoharrigan/pylearn2,kastnerkyle/pylearn2,shiquanwang/pylearn2,kose-y/pylearn2,aalmah/pylearn2,abergeron/pylearn2,junbochen/pylearn2,w1kke/pylearn2,fyffyt/pylearn2,daemonmaker/pylearn2,fyffyt/pylearn2,skearnes/pylearn2,JesseLivezey/plankton,mclaughlin6464/pylearn2,nouiz/pylearn2,hantek/pylearn2,se4u/pylearn2,lamblin/pylearn2,ddboline/pylearn2,lisa-lab/pylearn2,kastnerkyle/pylearn2,aalmah/pylearn2,lamblin/pylearn2,theoryno3/pylearn2,JesseLivezey/pylearn2,TNick/pylearn2,bartvm/pylearn2,shiquanwang/pylearn2,JesseLivezey/plankton,shiquanwang/pylearn2,chrish42/pylearn,CIFASIS/pylearn2,chrish42/pylearn,mclaughlin6464/pylearn2,jamessergeant/pylearn2,lancezlin/pylearn2,lisa-lab/pylearn2,jamessergeant/pylearn2,lancezlin/pylearn2,matrogers/pylearn2,mkraemer67/pylearn2,theoryno3/pylearn2,junbochen/pylearn2,daemonmaker/pylearn2,pkainz/pylearn2,theoryno3/pylearn2,CIFASIS/pylearn2,cosmoharrigan/pylearn2,jeremyfix/pylearn2,caidongyun/pylearn2,bartvm/pylearn2,se4u/pylearn2,kose-y/pylearn2,msingh172/pylearn2,KennethPierce/pylearnk,hyqneuron/pylearn2-maxsom,hyqneuron/pylearn2-maxsom,fishcorn/pylearn2,KennethPierce/pylearnk,fyffyt/pylearn2,kose-y/pylearn2,jamessergeant/pylearn2,lancezlin/pylearn2,caidongyun/pylearn2,lamblin/pylearn2,cosmoharrigan/pylearn2,skearnes/pylearn2,goodfeli/pylearn2,matrogers/pylearn2,fishcorn/pylearn2,fulmicoton/pylearn2,w1kke/pylearn2,caidongyun/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,pkainz/pylearn2,Refefer/pylearn2,aalmah/pylearn2,chrish42/pylearn,pkainz/pylearn2,bartvm/pylearn2,woozzu/pylearn2,jeremyfix/pylearn2,ashhher3/pylearn2,pombredanne/pylearn2,jeremyfix/pylearn2,Refefer/pylearn2,lunyang/pylearn2,lisa-lab/pylearn2,JesseLivezey/pylearn2,skearnes/pylearn2,lunyang/pylearn2,TNick/pylearn2,goodfeli/pylearn2,kose-y/pylearn2,woozzu/pylearn2,lancezlin/pylearn2,fishcorn/pylearn2,aalmah/pylearn2,lamblin/pylearn2,daemonmaker/pylearn2,kastnerkyle/pylearn2,JesseLivezey/pylearn2,mclaughlin6464/pylearn2,hyqneuron/pylearn2-maxsom,lunyang/pylearn2,woozzu/pylearn2,abergeron/pylearn2,w1kke/pylearn2,cosmoharrigan/pylearn2,alexjc/pylearn2,TNick/pylearn2,mkraemer67/pylearn2,w1kke/pylearn2
import os import numpy as np import pylearn2 from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix from pylearn2.termination_criteria import EpochCounter from pylearn2.utils.serial import load_train_file + def test_mnist(): """ Test the mnist.yaml file from the dropout paper on random input """ train = load_train_file(os.path.join(pylearn2.__path__[0], "scripts/papers/maxout/mnist.yaml")) random_X = np.random.rand(10, 784) random_y = np.random.randint(0, 10, (10, 1)) train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10) train.algorithm.termination_criterion = EpochCounter(max_epochs=1) + train.algorithm._set_monitoring_dataset(train.dataset) train.main_loop()
Allow papers/maxout to be tested without MNIST data
## Code Before: import os import numpy as np import pylearn2 from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix from pylearn2.termination_criteria import EpochCounter from pylearn2.utils.serial import load_train_file def test_mnist(): """ Test the mnist.yaml file from the dropout paper on random input """ train = load_train_file(os.path.join(pylearn2.__path__[0], "scripts/papers/maxout/mnist.yaml")) random_X = np.random.rand(10, 784) random_y = np.random.randint(0, 10, (10, 1)) train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10) train.algorithm.termination_criterion = EpochCounter(max_epochs=1) train.main_loop() ## Instruction: Allow papers/maxout to be tested without MNIST data ## Code After: import os import numpy as np import pylearn2 from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix from pylearn2.termination_criteria import EpochCounter from pylearn2.utils.serial import load_train_file def test_mnist(): """ Test the mnist.yaml file from the dropout paper on random input """ train = load_train_file(os.path.join(pylearn2.__path__[0], "scripts/papers/maxout/mnist.yaml")) random_X = np.random.rand(10, 784) random_y = np.random.randint(0, 10, (10, 1)) train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10) train.algorithm.termination_criterion = EpochCounter(max_epochs=1) train.algorithm._set_monitoring_dataset(train.dataset) train.main_loop()
import os import numpy as np import pylearn2 from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix from pylearn2.termination_criteria import EpochCounter from pylearn2.utils.serial import load_train_file + def test_mnist(): """ Test the mnist.yaml file from the dropout paper on random input """ train = load_train_file(os.path.join(pylearn2.__path__[0], "scripts/papers/maxout/mnist.yaml")) random_X = np.random.rand(10, 784) random_y = np.random.randint(0, 10, (10, 1)) train.dataset = DenseDesignMatrix(X=random_X, y=random_y, y_labels=10) - train.algorithm.termination_criterion = EpochCounter(max_epochs=1) + train.algorithm._set_monitoring_dataset(train.dataset) train.main_loop()
05b2848849553172873600ffd6344fc2b1f12d8e
example/__init__.py
example/__init__.py
from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): jurisdiction_id = 'ex' def get_metadata(self): return { 'name': 'Example', 'legislature_name': 'Example Legislature', 'legislature_url': 'http://example.com', 'terms': [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }], 'provides': ['people'], 'parties': [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ], 'session_details': { '2013': {'_scraped_name': '2013'} }, 'feature_flags': [], } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example' def get_metadata(self): return { 'name': 'Example', 'legislature_name': 'Example Legislature', 'legislature_url': 'http://example.com', 'terms': [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }], 'provides': ['people'], 'parties': [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ], 'session_details': { '2013': {'_scraped_name': '2013'} }, 'feature_flags': [], } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
Substitute a more realistic jurisdiction_id
Substitute a more realistic jurisdiction_id
Python
bsd-3-clause
datamade/pupa,mileswwatkins/pupa,rshorey/pupa,opencivicdata/pupa,mileswwatkins/pupa,influence-usa/pupa,datamade/pupa,influence-usa/pupa,rshorey/pupa,opencivicdata/pupa
from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): - jurisdiction_id = 'ex' + jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example' def get_metadata(self): return { 'name': 'Example', 'legislature_name': 'Example Legislature', 'legislature_url': 'http://example.com', 'terms': [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }], 'provides': ['people'], 'parties': [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ], 'session_details': { '2013': {'_scraped_name': '2013'} }, 'feature_flags': [], } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
Substitute a more realistic jurisdiction_id
## Code Before: from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): jurisdiction_id = 'ex' def get_metadata(self): return { 'name': 'Example', 'legislature_name': 'Example Legislature', 'legislature_url': 'http://example.com', 'terms': [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }], 'provides': ['people'], 'parties': [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ], 'session_details': { '2013': {'_scraped_name': '2013'} }, 'feature_flags': [], } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013'] ## Instruction: Substitute a more realistic jurisdiction_id ## Code After: from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example' def get_metadata(self): return { 'name': 'Example', 'legislature_name': 'Example Legislature', 'legislature_url': 'http://example.com', 'terms': [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }], 'provides': ['people'], 'parties': [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ], 'session_details': { '2013': {'_scraped_name': '2013'} }, 'feature_flags': [], } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): - jurisdiction_id = 'ex' + jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example' def get_metadata(self): return { 'name': 'Example', 'legislature_name': 'Example Legislature', 'legislature_url': 'http://example.com', 'terms': [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }], 'provides': ['people'], 'parties': [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ], 'session_details': { '2013': {'_scraped_name': '2013'} }, 'feature_flags': [], } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
f1ef0652acdd9211f8e39eb57845251e7ccc496e
commands.py
commands.py
import generate as gen commands = {} def cmd_func(name): def _cmd_func(f): commands.setdefault(name, f) return f return _cmd_func @cmd_func('get-latest-rev') def get_latest_rev(args): return "%s %s" % (gen.success('( )', gen.string('test')), gen.success('12')) def handle_command(msg): command = msg[0] args = msg [1] if command not in commands: return gen.failure(gen.list('12', gen.string('unknown command: %s' % command), gen.string('commands.py'), '0')) return commands[command](args)
import generate as gen commands = {} def cmd_func(name): def _cmd_func(f): commands.setdefault(name, f) return f return _cmd_func @cmd_func('get-latest-rev') def get_latest_rev(args): return "%s %s" % (gen.success('( )', gen.string('test')), gen.success('12')) def handle_command(msg): command = msg[0] args = msg [1] if command not in commands: return gen.failure(gen.list('210001', gen.string("Unknown command '%s'" % command), gen.string('commands.py'), '0')) return commands[command](args)
Use the real unknown command err-code
Use the real unknown command err-code Using the real unknown command error code means that the client actually understands the error and behaves appropriately.
Python
bsd-3-clause
slonopotamus/git_svn_server
import generate as gen commands = {} def cmd_func(name): def _cmd_func(f): commands.setdefault(name, f) return f return _cmd_func @cmd_func('get-latest-rev') def get_latest_rev(args): return "%s %s" % (gen.success('( )', gen.string('test')), gen.success('12')) def handle_command(msg): command = msg[0] args = msg [1] if command not in commands: - return gen.failure(gen.list('12', + return gen.failure(gen.list('210001', - gen.string('unknown command: %s' % command), + gen.string("Unknown command '%s'" % command), gen.string('commands.py'), '0')) return commands[command](args)
Use the real unknown command err-code
## Code Before: import generate as gen commands = {} def cmd_func(name): def _cmd_func(f): commands.setdefault(name, f) return f return _cmd_func @cmd_func('get-latest-rev') def get_latest_rev(args): return "%s %s" % (gen.success('( )', gen.string('test')), gen.success('12')) def handle_command(msg): command = msg[0] args = msg [1] if command not in commands: return gen.failure(gen.list('12', gen.string('unknown command: %s' % command), gen.string('commands.py'), '0')) return commands[command](args) ## Instruction: Use the real unknown command err-code ## Code After: import generate as gen commands = {} def cmd_func(name): def _cmd_func(f): commands.setdefault(name, f) return f return _cmd_func @cmd_func('get-latest-rev') def get_latest_rev(args): return "%s %s" % (gen.success('( )', gen.string('test')), gen.success('12')) def handle_command(msg): command = msg[0] args = msg [1] if command not in commands: return gen.failure(gen.list('210001', gen.string("Unknown command '%s'" % command), gen.string('commands.py'), '0')) return commands[command](args)
import generate as gen commands = {} def cmd_func(name): def _cmd_func(f): commands.setdefault(name, f) return f return _cmd_func @cmd_func('get-latest-rev') def get_latest_rev(args): return "%s %s" % (gen.success('( )', gen.string('test')), gen.success('12')) def handle_command(msg): command = msg[0] args = msg [1] if command not in commands: - return gen.failure(gen.list('12', ? ^ + return gen.failure(gen.list('210001', ? + ^^^^ - gen.string('unknown command: %s' % command), ? ^^ - + gen.string("Unknown command '%s'" % command), ? ^^ + + gen.string('commands.py'), '0')) return commands[command](args)
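The registry-dispatch pattern in this record reduces to a small standalone sketch. The wire format of the real `gen` module is not shown here, so the `make_failure` helper and the tuple shapes below are illustrative assumptions; only the registry, the decorator, and the 210001 code come from the record (210001 is Subversion's "unknown ra_svn command" error, which is why a real client recognises the failure instead of choking on it):

commands = {}

def cmd_func(name):
    # Register a handler under an svnserve protocol command name.
    def _cmd_func(f):
        commands.setdefault(name, f)
        return f
    return _cmd_func

def make_failure(apr_err, message, source='commands.py', line='0'):
    # Hypothetical stand-in for gen.failure(gen.list(...)); shape assumed.
    return ('failure', (apr_err, message, source, line))

@cmd_func('get-latest-rev')
def get_latest_rev(args):
    return ('success', '12')

def handle_command(msg):
    command, args = msg[0], msg[1]
    if command not in commands:
        return make_failure('210001', "Unknown command '%s'" % command)
    return commands[command](args)

print(handle_command(['no-such-cmd', []]))
# ('failure', ('210001', "Unknown command 'no-such-cmd'", 'commands.py', '0'))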
1fb1b1fa6ed40b593c00101967b86bf1f94de8ab
atlasseq/cmds/rowjoin.py
atlasseq/cmds/rowjoin.py
from __future__ import print_function import shutil import logging logger = logging.getLogger(__name__) from atlasseq.storage.base import BerkeleyDBStorage def rowjoin(partitioned_data, out_db, N=25000000): db_out = BerkeleyDBStorage(config={'filename': out_db}) for x in ["colour_to_sample_lookup", "sample_to_colour_lookup", "metadata"]: shutil.copy("".join([partitioned_data, "_0", x]), "".join([out_db, x])) batch = 0 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) for i in range(N): if i % 10000 == 0 and not i == 0: logger.info("%i of %i %f%% " % (i, N, 100*i/N)) db.storage.close() batch += 1 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) db_out[i] = db[i] return {'graph': out_db}
from __future__ import print_function import shutil import logging logger = logging.getLogger(__name__) from atlasseq.storage.base import BerkeleyDBStorage def rowjoin(partitioned_data, out_db, N=25000000): N = int(N) db_out = BerkeleyDBStorage(config={'filename': out_db}) for x in ["colour_to_sample_lookup", "sample_to_colour_lookup", "metadata"]: shutil.copy("".join([partitioned_data, "_0", x]), "".join([out_db, x])) batch = 0 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) for i in range(N): if i % 10000 == 0 and not i == 0: logger.info("%i of %i %f%% " % (i, N, 100*i/N)) db.storage.close() batch += 1 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) db_out[i] = db[i] return {'graph': out_db}
Add row join command for merging berkeley DBs
Add row join command for merging berkeley DBs
Python
mit
Phelimb/cbg,Phelimb/cbg,Phelimb/cbg,Phelimb/cbg
from __future__ import print_function import shutil import logging logger = logging.getLogger(__name__) from atlasseq.storage.base import BerkeleyDBStorage def rowjoin(partitioned_data, out_db, N=25000000): + N = int(N) db_out = BerkeleyDBStorage(config={'filename': out_db}) for x in ["colour_to_sample_lookup", "sample_to_colour_lookup", "metadata"]: shutil.copy("".join([partitioned_data, "_0", x]), "".join([out_db, x])) batch = 0 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) for i in range(N): if i % 10000 == 0 and not i == 0: logger.info("%i of %i %f%% " % (i, N, 100*i/N)) db.storage.close() batch += 1 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) db_out[i] = db[i] return {'graph': out_db}
Add row join command for merging berkeley DBs
## Code Before: from __future__ import print_function import shutil import logging logger = logging.getLogger(__name__) from atlasseq.storage.base import BerkeleyDBStorage def rowjoin(partitioned_data, out_db, N=25000000): db_out = BerkeleyDBStorage(config={'filename': out_db}) for x in ["colour_to_sample_lookup", "sample_to_colour_lookup", "metadata"]: shutil.copy("".join([partitioned_data, "_0", x]), "".join([out_db, x])) batch = 0 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) for i in range(N): if i % 10000 == 0 and not i == 0: logger.info("%i of %i %f%% " % (i, N, 100*i/N)) db.storage.close() batch += 1 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) db_out[i] = db[i] return {'graph': out_db} ## Instruction: Add row join command for merging berkeley DBs ## Code After: from __future__ import print_function import shutil import logging logger = logging.getLogger(__name__) from atlasseq.storage.base import BerkeleyDBStorage def rowjoin(partitioned_data, out_db, N=25000000): N = int(N) db_out = BerkeleyDBStorage(config={'filename': out_db}) for x in ["colour_to_sample_lookup", "sample_to_colour_lookup", "metadata"]: shutil.copy("".join([partitioned_data, "_0", x]), "".join([out_db, x])) batch = 0 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) for i in range(N): if i % 10000 == 0 and not i == 0: logger.info("%i of %i %f%% " % (i, N, 100*i/N)) db.storage.close() batch += 1 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) db_out[i] = db[i] return {'graph': out_db}
from __future__ import print_function import shutil import logging logger = logging.getLogger(__name__) from atlasseq.storage.base import BerkeleyDBStorage def rowjoin(partitioned_data, out_db, N=25000000): + N = int(N) db_out = BerkeleyDBStorage(config={'filename': out_db}) for x in ["colour_to_sample_lookup", "sample_to_colour_lookup", "metadata"]: shutil.copy("".join([partitioned_data, "_0", x]), "".join([out_db, x])) batch = 0 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) for i in range(N): if i % 10000 == 0 and not i == 0: logger.info("%i of %i %f%% " % (i, N, 100*i/N)) db.storage.close() batch += 1 db = BerkeleyDBStorage( config={'filename': "".join([partitioned_data, "_", str(batch)])}) db_out[i] = db[i] return {'graph': out_db}
701a6b4a4ed8a4db9f1b961cf8d5a1a6ef5c48a1
gratipay/renderers/csv_dump.py
gratipay/renderers/csv_dump.py
from __future__ import absolute_import, division, print_function, unicode_literals import csv from io import BytesIO from aspen import renderers class Renderer(renderers.Renderer): def render_content(self, context): context['response'].headers['Content-Type'] = 'text/plain' rows = eval(self.compiled, globals(), context) if not rows: return '' f = BytesIO() w = csv.writer(f) if hasattr(rows[0], '_fields'): w.writerow(rows[0]._fields) w.writerows(rows) f.seek(0) return f.read() class Factory(renderers.Factory): Renderer = Renderer
from __future__ import absolute_import, division, print_function, unicode_literals import csv from io import BytesIO from aspen import renderers class Renderer(renderers.Renderer): def render_content(self, context): rows = eval(self.compiled, globals(), context) if not rows: return '' f = BytesIO() w = csv.writer(f) if hasattr(rows[0], '_fields'): w.writerow(rows[0]._fields) w.writerows(rows) f.seek(0) return f.read() class Factory(renderers.Factory): Renderer = Renderer
Remove line that sets content type text/plain
Remove line that sets content type text/plain
Python
mit
studio666/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,mccolgst/www.gittip.com
from __future__ import absolute_import, division, print_function, unicode_literals import csv from io import BytesIO from aspen import renderers class Renderer(renderers.Renderer): def render_content(self, context): - context['response'].headers['Content-Type'] = 'text/plain' rows = eval(self.compiled, globals(), context) if not rows: return '' f = BytesIO() w = csv.writer(f) if hasattr(rows[0], '_fields'): w.writerow(rows[0]._fields) w.writerows(rows) f.seek(0) return f.read() class Factory(renderers.Factory): Renderer = Renderer
Remove line that sets content type text/plain
## Code Before: from __future__ import absolute_import, division, print_function, unicode_literals import csv from io import BytesIO from aspen import renderers class Renderer(renderers.Renderer): def render_content(self, context): context['response'].headers['Content-Type'] = 'text/plain' rows = eval(self.compiled, globals(), context) if not rows: return '' f = BytesIO() w = csv.writer(f) if hasattr(rows[0], '_fields'): w.writerow(rows[0]._fields) w.writerows(rows) f.seek(0) return f.read() class Factory(renderers.Factory): Renderer = Renderer ## Instruction: Remove line that sets content type text/plain ## Code After: from __future__ import absolute_import, division, print_function, unicode_literals import csv from io import BytesIO from aspen import renderers class Renderer(renderers.Renderer): def render_content(self, context): rows = eval(self.compiled, globals(), context) if not rows: return '' f = BytesIO() w = csv.writer(f) if hasattr(rows[0], '_fields'): w.writerow(rows[0]._fields) w.writerows(rows) f.seek(0) return f.read() class Factory(renderers.Factory): Renderer = Renderer
from __future__ import absolute_import, division, print_function, unicode_literals import csv from io import BytesIO from aspen import renderers class Renderer(renderers.Renderer): def render_content(self, context): - context['response'].headers['Content-Type'] = 'text/plain' rows = eval(self.compiled, globals(), context) if not rows: return '' f = BytesIO() w = csv.writer(f) if hasattr(rows[0], '_fields'): w.writerow(rows[0]._fields) w.writerows(rows) f.seek(0) return f.read() class Factory(renderers.Factory): Renderer = Renderer
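The renderer above keys off the namedtuple `_fields` attribute to decide whether a header row is available. A minimal standalone sketch of that idiom, with an invented row type; `BytesIO` matches the Python 2 code in this record (on Python 3, `csv` expects text, so `io.StringIO` would replace it):

import csv
from collections import namedtuple
from io import BytesIO

Row = namedtuple('Row', ['username', 'amount'])   # illustrative row type
rows = [Row('alice', '1.00'), Row('bob', '2.50')]

f = BytesIO()
w = csv.writer(f)
if hasattr(rows[0], '_fields'):    # namedtuples carry their column names
    w.writerow(rows[0]._fields)    # header row: username,amount
w.writerows(rows)                  # each namedtuple unpacks as a plain row
f.seek(0)
print(f.read())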
7ea233b7f955f7dbb291d0662fe321cddfceba80
mopidy/frontends/lastfm/__init__.py
mopidy/frontends/lastfm/__init__.py
from __future__ import unicode_literals import mopidy from mopidy import ext from mopidy.exceptions import ExtensionError __doc__ = """ Frontend which scrobbles the music you play to your `Last.fm <http://www.last.fm>`_ profile. .. note:: This frontend requires a free user account at Last.fm. **Dependencies:** .. literalinclude:: ../../../requirements/lastfm.txt **Settings:** - :attr:`mopidy.settings.LASTFM_USERNAME` - :attr:`mopidy.settings.LASTFM_PASSWORD` **Usage:** The frontend is enabled by default if all dependencies are available. """ class Extension(ext.Extension): name = 'Mopidy-Lastfm' version = mopidy.__version__ def get_default_config(self): return '[ext.lastfm]' def validate_config(self, config): pass def validate_environment(self): try: import pylast # noqa except ImportError as e: raise ExtensionError('pylast library not found', e) def get_frontend_classes(self): from .actor import LastfmFrontend return [LastfmFrontend]
from __future__ import unicode_literals import mopidy from mopidy import exceptions, ext from mopidy.utils import config, formatting default_config = """ [ext.lastfm] # If the Last.fm extension should be enabled or not enabled = true # Your Last.fm username username = # Your Last.fm password password = """ __doc__ = """ Frontend which scrobbles the music you play to your `Last.fm <http://www.last.fm>`_ profile. .. note:: This frontend requires a free user account at Last.fm. **Dependencies:** .. literalinclude:: ../../../requirements/lastfm.txt **Default config:** .. code-block:: ini %(config)s **Usage:** The frontend is enabled by default if all dependencies are available. """ % {'config': formatting.indent(default_config)} class Extension(ext.Extension): name = 'Mopidy-Lastfm' version = mopidy.__version__ def get_default_config(self): return default_config def get_config_schema(self): schema = config.ExtensionConfigSchema() schema['username'] = config.String() schema['password'] = config.String(secret=True) return schema def validate_environment(self): try: import pylast # noqa except ImportError as e: raise exceptions.ExtensionError('pylast library not found', e) def get_frontend_classes(self): from .actor import LastfmFrontend return [LastfmFrontend]
Add default config and config schema
lastfm: Add default config and config schema
Python
apache-2.0
diandiankan/mopidy,jmarsik/mopidy,ZenithDK/mopidy,diandiankan/mopidy,bacontext/mopidy,ali/mopidy,jcass77/mopidy,quartz55/mopidy,rawdlite/mopidy,priestd09/mopidy,kingosticks/mopidy,mopidy/mopidy,bencevans/mopidy,swak/mopidy,mokieyue/mopidy,hkariti/mopidy,quartz55/mopidy,kingosticks/mopidy,vrs01/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,rawdlite/mopidy,vrs01/mopidy,liamw9534/mopidy,jodal/mopidy,ali/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,abarisain/mopidy,adamcik/mopidy,woutervanwijk/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,adamcik/mopidy,tkem/mopidy,dbrgn/mopidy,diandiankan/mopidy,rawdlite/mopidy,mopidy/mopidy,priestd09/mopidy,glogiotatidis/mopidy,tkem/mopidy,ZenithDK/mopidy,hkariti/mopidy,rawdlite/mopidy,vrs01/mopidy,mokieyue/mopidy,jodal/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,adamcik/mopidy,liamw9534/mopidy,quartz55/mopidy,pacificIT/mopidy,bencevans/mopidy,ali/mopidy,bencevans/mopidy,dbrgn/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,mokieyue/mopidy,jcass77/mopidy,abarisain/mopidy,tkem/mopidy,dbrgn/mopidy,hkariti/mopidy,jmarsik/mopidy,jodal/mopidy,swak/mopidy,vrs01/mopidy,quartz55/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,bacontext/mopidy,jmarsik/mopidy,bacontext/mopidy,mokieyue/mopidy,tkem/mopidy,swak/mopidy,priestd09/mopidy,hkariti/mopidy,woutervanwijk/mopidy,pacificIT/mopidy,ali/mopidy,bacontext/mopidy,bencevans/mopidy,mopidy/mopidy,kingosticks/mopidy,glogiotatidis/mopidy,jcass77/mopidy,swak/mopidy,pacificIT/mopidy
from __future__ import unicode_literals import mopidy - from mopidy import ext + from mopidy import exceptions, ext - from mopidy.exceptions import ExtensionError + from mopidy.utils import config, formatting + + default_config = """ + [ext.lastfm] + + # If the Last.fm extension should be enabled or not + enabled = true + + # Your Last.fm username + username = + + # Your Last.fm password + password = + """ __doc__ = """ Frontend which scrobbles the music you play to your `Last.fm <http://www.last.fm>`_ profile. .. note:: This frontend requires a free user account at Last.fm. **Dependencies:** .. literalinclude:: ../../../requirements/lastfm.txt - **Settings:** + **Default config:** - - :attr:`mopidy.settings.LASTFM_USERNAME` - - :attr:`mopidy.settings.LASTFM_PASSWORD` + .. code-block:: ini + + %(config)s **Usage:** The frontend is enabled by default if all dependencies are available. - """ + """ % {'config': formatting.indent(default_config)} class Extension(ext.Extension): name = 'Mopidy-Lastfm' version = mopidy.__version__ def get_default_config(self): - return '[ext.lastfm]' + return default_config - def validate_config(self, config): - pass + def get_config_schema(self): + schema = config.ExtensionConfigSchema() + schema['username'] = config.String() + schema['password'] = config.String(secret=True) + return schema def validate_environment(self): try: import pylast # noqa except ImportError as e: - raise ExtensionError('pylast library not found', e) + raise exceptions.ExtensionError('pylast library not found', e) def get_frontend_classes(self): from .actor import LastfmFrontend return [LastfmFrontend]
Add default config and config schema
## Code Before: from __future__ import unicode_literals import mopidy from mopidy import ext from mopidy.exceptions import ExtensionError __doc__ = """ Frontend which scrobbles the music you play to your `Last.fm <http://www.last.fm>`_ profile. .. note:: This frontend requires a free user account at Last.fm. **Dependencies:** .. literalinclude:: ../../../requirements/lastfm.txt **Settings:** - :attr:`mopidy.settings.LASTFM_USERNAME` - :attr:`mopidy.settings.LASTFM_PASSWORD` **Usage:** The frontend is enabled by default if all dependencies are available. """ class Extension(ext.Extension): name = 'Mopidy-Lastfm' version = mopidy.__version__ def get_default_config(self): return '[ext.lastfm]' def validate_config(self, config): pass def validate_environment(self): try: import pylast # noqa except ImportError as e: raise ExtensionError('pylast library not found', e) def get_frontend_classes(self): from .actor import LastfmFrontend return [LastfmFrontend] ## Instruction: Add default config and config schema ## Code After: from __future__ import unicode_literals import mopidy from mopidy import exceptions, ext from mopidy.utils import config, formatting default_config = """ [ext.lastfm] # If the Last.fm extension should be enabled or not enabled = true # Your Last.fm username username = # Your Last.fm password password = """ __doc__ = """ Frontend which scrobbles the music you play to your `Last.fm <http://www.last.fm>`_ profile. .. note:: This frontend requires a free user account at Last.fm. **Dependencies:** .. literalinclude:: ../../../requirements/lastfm.txt **Default config:** .. code-block:: ini %(config)s **Usage:** The frontend is enabled by default if all dependencies are available. """ % {'config': formatting.indent(default_config)} class Extension(ext.Extension): name = 'Mopidy-Lastfm' version = mopidy.__version__ def get_default_config(self): return default_config def get_config_schema(self): schema = config.ExtensionConfigSchema() schema['username'] = config.String() schema['password'] = config.String(secret=True) return schema def validate_environment(self): try: import pylast # noqa except ImportError as e: raise exceptions.ExtensionError('pylast library not found', e) def get_frontend_classes(self): from .actor import LastfmFrontend return [LastfmFrontend]
from __future__ import unicode_literals import mopidy - from mopidy import ext + from mopidy import exceptions, ext ? +++ +++++++++ - from mopidy.exceptions import ExtensionError + from mopidy.utils import config, formatting + + default_config = """ + [ext.lastfm] + + # If the Last.fm extension should be enabled or not + enabled = true + + # Your Last.fm username + username = + + # Your Last.fm password + password = + """ __doc__ = """ Frontend which scrobbles the music you play to your `Last.fm <http://www.last.fm>`_ profile. .. note:: This frontend requires a free user account at Last.fm. **Dependencies:** .. literalinclude:: ../../../requirements/lastfm.txt - **Settings:** + **Default config:** - - :attr:`mopidy.settings.LASTFM_USERNAME` - - :attr:`mopidy.settings.LASTFM_PASSWORD` + .. code-block:: ini + + %(config)s **Usage:** The frontend is enabled by default if all dependencies are available. - """ + """ % {'config': formatting.indent(default_config)} class Extension(ext.Extension): name = 'Mopidy-Lastfm' version = mopidy.__version__ def get_default_config(self): - return '[ext.lastfm]' + return default_config - def validate_config(self, config): - pass + def get_config_schema(self): + schema = config.ExtensionConfigSchema() + schema['username'] = config.String() + schema['password'] = config.String(secret=True) + return schema def validate_environment(self): try: import pylast # noqa except ImportError as e: - raise ExtensionError('pylast library not found', e) + raise exceptions.ExtensionError('pylast library not found', e) ? +++++++++++ def get_frontend_classes(self): from .actor import LastfmFrontend return [LastfmFrontend]
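The `default_config` string added in this commit is ordinary INI text, so it can be sanity-checked with the standard-library parser alone. A sketch using Python 3's `configparser` (the record itself targets Python 2, and nothing below depends on Mopidy's `config.ExtensionConfigSchema`):

from configparser import ConfigParser

default_config = """
[ext.lastfm]
enabled = true
username =
password =
"""

parser = ConfigParser()
parser.read_string(default_config)
print(parser.getboolean('ext.lastfm', 'enabled'))   # True
print(repr(parser.get('ext.lastfm', 'password')))   # '' until the user fills it in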
e8175497157ed34f91b9ba96118c4e76cd3ed0e4
bmsmodules/Events.py
bmsmodules/Events.py
from operator import isCallable class Events(object): def __init__(self): self._events_ = {} def addEvent(self, eventname, func): if not isCallable(func): raise RuntimeError("func argument must be a function!") elif not isinstance(eventname, basestring): raise RuntimeError("Event name must be a string!") elif eventname in self._events_: raise RuntimeError("Event name already exists!") else: self._events_[eventname] = func def execEvent(self, eventname, *args, **kwargs): if eventname not in self._events_: raise RuntimeError("No such Event name '{0}'".format(eventname))
from operator import isCallable class Events(object): def __init__(self): self._events_ = {} def addEvent(self, eventname, func): if not isCallable(func): raise RuntimeError("func argument must be a function!") elif not isinstance(eventname, (basestring, int)): raise RuntimeError("Event name must be a string!") elif eventname in self._events_: raise RuntimeError("Event name already exists!") else: self._events_[eventname] = func def execEvent(self, eventname, *args, **kwargs): if eventname not in self._events_: raise RuntimeError("No such Event name '{0}'".format(eventname)) else: self._events_[eventname](*args, **kwargs)
Add event execution, allow integers as event name
Add event execution, allow integers as event name
Python
bsd-3-clause
RenolY2/py-playBMS
from operator import isCallable class Events(object): def __init__(self): self._events_ = {} def addEvent(self, eventname, func): if not isCallable(func): raise RuntimeError("func argument must be a function!") - elif not isinstance(eventname, basestring): + elif not isinstance(eventname, (basestring, int)): raise RuntimeError("Event name must be a string!") elif eventname in self._events_: raise RuntimeError("Event name already exists!") else: self._events_[eventname] = func def execEvent(self, eventname, *args, **kwargs): if eventname not in self._events_: raise RuntimeError("No such Event name '{0}'".format(eventname)) + else: + self._events_[eventname](*args, **kwargs)
Add event execution, allow integers as event name
## Code Before: from operator import isCallable class Events(object): def __init__(self): self._events_ = {} def addEvent(self, eventname, func): if not isCallable(func): raise RuntimeError("func argument must be a function!") elif not isinstance(eventname, basestring): raise RuntimeError("Event name must be a string!") elif eventname in self._events_: raise RuntimeError("Event name already exists!") else: self._events_[eventname] = func def execEvent(self, eventname, *args, **kwargs): if eventname not in self._events_: raise RuntimeError("No such Event name '{0}'".format(eventname)) ## Instruction: Add event execution, allow integers as event name ## Code After: from operator import isCallable class Events(object): def __init__(self): self._events_ = {} def addEvent(self, eventname, func): if not isCallable(func): raise RuntimeError("func argument must be a function!") elif not isinstance(eventname, (basestring, int)): raise RuntimeError("Event name must be a string!") elif eventname in self._events_: raise RuntimeError("Event name already exists!") else: self._events_[eventname] = func def execEvent(self, eventname, *args, **kwargs): if eventname not in self._events_: raise RuntimeError("No such Event name '{0}'".format(eventname)) else: self._events_[eventname](*args, **kwargs)
from operator import isCallable class Events(object): def __init__(self): self._events_ = {} def addEvent(self, eventname, func): if not isCallable(func): raise RuntimeError("func argument must be a function!") - elif not isinstance(eventname, basestring): + elif not isinstance(eventname, (basestring, int)): ? + ++++++ raise RuntimeError("Event name must be a string!") elif eventname in self._events_: raise RuntimeError("Event name already exists!") else: self._events_[eventname] = func def execEvent(self, eventname, *args, **kwargs): if eventname not in self._events_: raise RuntimeError("No such Event name '{0}'".format(eventname)) + else: + self._events_[eventname](*args, **kwargs)
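A short usage sketch for the registry above, assuming the `Events` class from this record is in scope; the event name and handler are invented. Note that `operator.isCallable` existed only in Python 2; the built-in `callable()` plays that role in Python 3:

def on_join(name):
    print("joined: %s" % name)

events = Events()
events.addEvent('join', on_join)     # event names may be strings or ints
events.execEvent('join', 'alice')    # prints: joined: alice
# events.execEvent('part')           # would raise: No such Event name 'part'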
9da3f2a835fa2aaba5d91ffe31b3fcaf8d83a4c9
snake/main.py
snake/main.py
import os import sys from snake.core import Snake SNAKEFILE_LOADED = False def abort(msg): print >> sys.stderr, "Error: %s" % msg sys.exit(1) def load_snakefile(path, fail_silently=False): global SNAKEFILE_LOADED if not SNAKEFILE_LOADED: sys.path.insert(0, path) try: return __import__('snakefile') except ImportError: if not fail_silently: abort("couldn't find any snakefile.") else: SNAKEFILE_LOADED = True del sys.path[0] def find_snakefile(): global SNAKEFILE_LOADED path = os.getcwd() while True: filepath = os.path.join(path, 'snakefile.py') if os.path.isfile(filepath): return load_snakefile(path), filepath if not os.path.split(path)[1]: break path = os.path.split(path)[0] if not SNAKEFILE_LOADED: abort("couldn't find any snakefile.") def main(): snakefile, snakefilepath = find_snakefile() for name in dir(snakefile): attr = getattr(snakefile, name) if isinstance(attr, Snake): attr.run(snakefilepath) break else: abort("couldn't find any Snake instance in snakefile.")
import imp import os import sys from snake.core import Snake def abort(msg): print >> sys.stderr, "Error: %s" % msg sys.exit(1) def get_ascending_paths(path): paths = [] while True: paths.append(path) path, tail = os.path.split(path) if not tail: break return paths def find_snakefile(): paths = get_ascending_paths(os.getcwd()) try: return imp.find_module('snakefile', paths) except: abort("couldn't find any snakefile.") def get_snakefile(): return imp.load_module('snakefile', *find_snakefile()) def main(): snakefile = get_snakefile() for name in dir(snakefile): attr = getattr(snakefile, name) if isinstance(attr, Snake): attr.run(snakefile.__file__) break else: abort("couldn't find any Snake instance in snakefile.")
Improve the way snakefile loading works
Improve the way snakefile loading works
Python
bsd-2-clause
yumike/snake
+ import imp import os import sys - from snake.core import Snake - - - SNAKEFILE_LOADED = False def abort(msg): print >> sys.stderr, "Error: %s" % msg sys.exit(1) + def get_ascending_paths(path): + paths = [] + while True: + paths.append(path) + path, tail = os.path.split(path) + if not tail: + break + return paths - def load_snakefile(path, fail_silently=False): - global SNAKEFILE_LOADED - if not SNAKEFILE_LOADED: - sys.path.insert(0, path) - try: - return __import__('snakefile') - except ImportError: - if not fail_silently: - abort("couldn't find any snakefile.") - else: - SNAKEFILE_LOADED = True - del sys.path[0] def find_snakefile(): + paths = get_ascending_paths(os.getcwd()) + try: + return imp.find_module('snakefile', paths) + except: - global SNAKEFILE_LOADED - path = os.getcwd() - while True: - filepath = os.path.join(path, 'snakefile.py') - if os.path.isfile(filepath): - return load_snakefile(path), filepath - if not os.path.split(path)[1]: - break - path = os.path.split(path)[0] - if not SNAKEFILE_LOADED: abort("couldn't find any snakefile.") + def get_snakefile(): + return imp.load_module('snakefile', *find_snakefile()) + + def main(): - snakefile, snakefilepath = find_snakefile() + snakefile = get_snakefile() for name in dir(snakefile): attr = getattr(snakefile, name) if isinstance(attr, Snake): - attr.run(snakefilepath) + attr.run(snakefile.__file__) break else: abort("couldn't find any Snake instance in snakefile.")
Improve the way snakefile loading works
## Code Before: import os import sys from snake.core import Snake SNAKEFILE_LOADED = False def abort(msg): print >> sys.stderr, "Error: %s" % msg sys.exit(1) def load_snakefile(path, fail_silently=False): global SNAKEFILE_LOADED if not SNAKEFILE_LOADED: sys.path.insert(0, path) try: return __import__('snakefile') except ImportError: if not fail_silently: abort("couldn't find any snakefile.") else: SNAKEFILE_LOADED = True del sys.path[0] def find_snakefile(): global SNAKEFILE_LOADED path = os.getcwd() while True: filepath = os.path.join(path, 'snakefile.py') if os.path.isfile(filepath): return load_snakefile(path), filepath if not os.path.split(path)[1]: break path = os.path.split(path)[0] if not SNAKEFILE_LOADED: abort("couldn't find any snakefile.") def main(): snakefile, snakefilepath = find_snakefile() for name in dir(snakefile): attr = getattr(snakefile, name) if isinstance(attr, Snake): attr.run(snakefilepath) break else: abort("couldn't find any Snake instance in snakefile.") ## Instruction: Improve the way snakefile loading works ## Code After: import imp import os import sys from snake.core import Snake def abort(msg): print >> sys.stderr, "Error: %s" % msg sys.exit(1) def get_ascending_paths(path): paths = [] while True: paths.append(path) path, tail = os.path.split(path) if not tail: break return paths def find_snakefile(): paths = get_ascending_paths(os.getcwd()) try: return imp.find_module('snakefile', paths) except: abort("couldn't find any snakefile.") def get_snakefile(): return imp.load_module('snakefile', *find_snakefile()) def main(): snakefile = get_snakefile() for name in dir(snakefile): attr = getattr(snakefile, name) if isinstance(attr, Snake): attr.run(snakefile.__file__) break else: abort("couldn't find any Snake instance in snakefile.")
+ import imp import os import sys - from snake.core import Snake - - - SNAKEFILE_LOADED = False def abort(msg): print >> sys.stderr, "Error: %s" % msg sys.exit(1) + def get_ascending_paths(path): + paths = [] + while True: + paths.append(path) + path, tail = os.path.split(path) + if not tail: + break + return paths - def load_snakefile(path, fail_silently=False): - global SNAKEFILE_LOADED - if not SNAKEFILE_LOADED: - sys.path.insert(0, path) - try: - return __import__('snakefile') - except ImportError: - if not fail_silently: - abort("couldn't find any snakefile.") - else: - SNAKEFILE_LOADED = True - del sys.path[0] def find_snakefile(): + paths = get_ascending_paths(os.getcwd()) + try: + return imp.find_module('snakefile', paths) + except: - global SNAKEFILE_LOADED - path = os.getcwd() - while True: - filepath = os.path.join(path, 'snakefile.py') - if os.path.isfile(filepath): - return load_snakefile(path), filepath - if not os.path.split(path)[1]: - break - path = os.path.split(path)[0] - if not SNAKEFILE_LOADED: abort("couldn't find any snakefile.") + def get_snakefile(): + return imp.load_module('snakefile', *find_snakefile()) + + def main(): - snakefile, snakefilepath = find_snakefile() + snakefile = get_snakefile() for name in dir(snakefile): attr = getattr(snakefile, name) if isinstance(attr, Snake): - attr.run(snakefilepath) ? ^^^^ + attr.run(snakefile.__file__) ? ^^^^^^^^^ break else: abort("couldn't find any Snake instance in snakefile.")
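The `imp` module leaned on above was deprecated throughout Python 3 and removed in 3.12; the same ascending-directory search can be written with `os.path` plus `importlib`. Only the `snakefile.py` name comes from the record, the rest is an illustrative sketch:

import os
import importlib.util

def get_ascending_paths(path):
    # Yield path, then each parent directory, up to the filesystem root.
    while True:
        yield path
        path, tail = os.path.split(path)
        if not tail:
            break

def load_snakefile():
    for directory in get_ascending_paths(os.getcwd()):
        candidate = os.path.join(directory, 'snakefile.py')
        if os.path.isfile(candidate):
            spec = importlib.util.spec_from_file_location('snakefile', candidate)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module
    raise SystemExit("Error: couldn't find any snakefile.")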
143c0188566ac07ac3fdb9e6dfca8863cc169bbb
ts3observer/observer.py
ts3observer/observer.py
''' Created on Nov 9, 2014 @author: fechnert ''' import yaml import logging import features class Configuration(dict): ''' Read and provide the yaml config ''' def __init__(self, path): ''' Initialize the file ''' with open(path, 'r') as f: self.update(yaml.load(f)) class Supervisor(object): ''' Guide the different features to do their work ''' def __init__(self): ''' Initialize the Config ''' self.config = Configuration('config.yml') def execute(self): for feature in self._import_features().values(): try: feature.run() except NotImplementedError: logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__)) def _get_enabled_features(self): ''' Get all features which are enabled in config ''' features = [] for feature in self.config['features']: if self.config['features'][feature]['enable']: features.append(feature) return features def _import_features(self): ''' Import only the needed features ''' feature_objects = {} for feature in self._get_enabled_features(): feature_objects.update({ feature: getattr(features, feature)(self.config['features'][feature]) }) return feature_objects
''' Created on Nov 9, 2014 @author: fechnert ''' import yaml import logging import features class Configuration(dict): ''' Read and provide the yaml config ''' def __init__(self, path): ''' Initialize the file ''' with open(path, 'r') as f: self.update(yaml.load(f)) class Supervisor(object): ''' Guide the different features to do their work ''' def __init__(self): ''' Initialize the Config ''' self.config = Configuration('config.yml') def execute(self): for feature in self._import_features().values(): try: feature.run() except NotImplementedError: logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__)) def _get_enabled_features(self): ''' Get all features which are enabled in config ''' features = [] for feature in self.config['features']: if self.config['features'][feature]['enable']: features.append(feature) return features def _import_features(self): ''' Import only the needed features ''' feature_objects = {} for feature in self._get_enabled_features(): feature_objects.update({ feature: getattr(features, feature)(self.config['features'][feature]) }) return feature_objects class Client(object): ''' Represents the client ''' def __init__(self, **kwargs): ''' Fill the object dynamically with client attributes got from telnet ''' for key, value in kwargs.items(): setattr(self, key, value) class Channel(object): ''' Represents the Channel ''' def __init__(self, **kwargs): ''' Fill the object dynamically with channel attributes got from telnet ''' for key, value in kwargs.items(): setattr(self, key, value)
Add client and channel models
Add client and channel models
Python
mit
HWDexperte/ts3observer
''' Created on Nov 9, 2014 @author: fechnert ''' import yaml import logging import features class Configuration(dict): ''' Read and provide the yaml config ''' def __init__(self, path): ''' Initialize the file ''' with open(path, 'r') as f: self.update(yaml.load(f)) class Supervisor(object): ''' Guide the different features to do their work ''' def __init__(self): ''' Initialize the Config ''' self.config = Configuration('config.yml') def execute(self): for feature in self._import_features().values(): try: feature.run() except NotImplementedError: logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__)) def _get_enabled_features(self): ''' Get all features which are enabled in config ''' features = [] for feature in self.config['features']: if self.config['features'][feature]['enable']: features.append(feature) return features def _import_features(self): ''' Import only the needed features ''' feature_objects = {} for feature in self._get_enabled_features(): feature_objects.update({ feature: getattr(features, feature)(self.config['features'][feature]) }) return feature_objects + + class Client(object): + ''' Represents the client ''' + + def __init__(self, **kwargs): + ''' Fill the object dynamically with client attributes got from telnet ''' + for key, value in kwargs.items(): + setattr(self, key, value) + + + class Channel(object): + ''' Represents the Channel ''' + + def __init__(self, **kwargs): + ''' Fill the object dynamically with channel attributes got from telnet ''' + for key, value in kwargs.items(): + setattr(self, key, value) +
Add client and channel models
## Code Before: ''' Created on Nov 9, 2014 @author: fechnert ''' import yaml import logging import features class Configuration(dict): ''' Read and provide the yaml config ''' def __init__(self, path): ''' Initialize the file ''' with open(path, 'r') as f: self.update(yaml.load(f)) class Supervisor(object): ''' Guide the different features to do their work ''' def __init__(self): ''' Initialize the Config ''' self.config = Configuration('config.yml') def execute(self): for feature in self._import_features().values(): try: feature.run() except NotImplementedError: logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__)) def _get_enabled_features(self): ''' Get all features which are enabled in config ''' features = [] for feature in self.config['features']: if self.config['features'][feature]['enable']: features.append(feature) return features def _import_features(self): ''' Import only the needed features ''' feature_objects = {} for feature in self._get_enabled_features(): feature_objects.update({ feature: getattr(features, feature)(self.config['features'][feature]) }) return feature_objects ## Instruction: Add client and channel models ## Code After: ''' Created on Nov 9, 2014 @author: fechnert ''' import yaml import logging import features class Configuration(dict): ''' Read and provide the yaml config ''' def __init__(self, path): ''' Initialize the file ''' with open(path, 'r') as f: self.update(yaml.load(f)) class Supervisor(object): ''' Guide the different features to do their work ''' def __init__(self): ''' Initialize the Config ''' self.config = Configuration('config.yml') def execute(self): for feature in self._import_features().values(): try: feature.run() except NotImplementedError: logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__)) def _get_enabled_features(self): ''' Get all features which are enabled in config ''' features = [] for feature in self.config['features']: if self.config['features'][feature]['enable']: features.append(feature) return features def _import_features(self): ''' Import only the needed features ''' feature_objects = {} for feature in self._get_enabled_features(): feature_objects.update({ feature: getattr(features, feature)(self.config['features'][feature]) }) return feature_objects class Client(object): ''' Represents the client ''' def __init__(self, **kwargs): ''' Fill the object dynamically with client attributes got from telnet ''' for key, value in kwargs.items(): setattr(self, key, value) class Channel(object): ''' Represents the Channel ''' def __init__(self, **kwargs): ''' Fill the object dynamically with channel attributes got from telnet ''' for key, value in kwargs.items(): setattr(self, key, value)
''' Created on Nov 9, 2014 @author: fechnert ''' import yaml import logging import features class Configuration(dict): ''' Read and provide the yaml config ''' def __init__(self, path): ''' Initialize the file ''' with open(path, 'r') as f: self.update(yaml.load(f)) class Supervisor(object): ''' Guide the different features to do their work ''' def __init__(self): ''' Initialize the Config ''' self.config = Configuration('config.yml') def execute(self): for feature in self._import_features().values(): try: feature.run() except NotImplementedError: logging.warn('Can\'t run Feature \'{}\''.format(feature.__class__.__name__)) def _get_enabled_features(self): ''' Get all features which are enabled in config ''' features = [] for feature in self.config['features']: if self.config['features'][feature]['enable']: features.append(feature) return features def _import_features(self): ''' Import only the needed features ''' feature_objects = {} for feature in self._get_enabled_features(): feature_objects.update({ feature: getattr(features, feature)(self.config['features'][feature]) }) return feature_objects + + + class Client(object): + ''' Represents the client ''' + + def __init__(self, **kwargs): + ''' Fill the object dynamically with client attributes got from telnet ''' + for key, value in kwargs.items(): + setattr(self, key, value) + + + class Channel(object): + ''' Represents the Channel ''' + + def __init__(self, **kwargs): + ''' Fill the object dynamically with channel attributes got from telnet ''' + for key, value in kwargs.items(): + setattr(self, key, value)
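The two model classes above simply mirror whatever key/value pairs the telnet layer hands over onto instance attributes. A quick usage sketch, assuming the classes from this record are in scope; the field names are invented and need not match real TeamSpeak 3 query output:

client = Client(clid='7', nickname='alice')
print(client.nickname)                 # alice
channel = Channel(cid='2', name='Lobby')
print(channel.name)                    # Lobby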
f4f5a6ffa1fd60437b83bfc435a180ddf2433ea4
tests/test_confirmation.py
tests/test_confirmation.py
import pytest import linkatos.confirmation as confirmation def test_no_confirmation_with_url(): expecting_confirmation = False parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is True # to do test process_confirmation_if_yn(parsed_message, expecting_confirmation) def test_confirmation_with_url(): expecting_confirmation = True parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is True def test_confirmation_without_url(): expecting_confirmation = True parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is True def test_no_confirmation_without_url(): expecting_confirmation = False parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is False
import pytest import linkatos.confirmation as confirmation def test_no_confirmation_with_url(): expecting_confirmation = False parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True # to do test process_confirmation_if_yn(parsed_message, expecting_confirmation) def test_confirmation_with_url(): expecting_confirmation = True parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_confirmation_without_url(): expecting_confirmation = True parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_no_confirmation_without_url(): expecting_confirmation = False parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is False
Fix change of function names
test: Fix change of function names
Python
mit
iwi/linkatos,iwi/linkatos
import pytest import linkatos.confirmation as confirmation def test_no_confirmation_with_url(): expecting_confirmation = False parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} - assert confirmation.update_confirmation_if_url(parsed_m, + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True # to do test process_confirmation_if_yn(parsed_message, expecting_confirmation) def test_confirmation_with_url(): expecting_confirmation = True parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} - assert confirmation.update_confirmation_if_url(parsed_m, + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_confirmation_without_url(): expecting_confirmation = True parsed_m = {'out': None, 'channel': 'ch', 'type': None} - assert confirmation.update_confirmation_if_url(parsed_m, + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_no_confirmation_without_url(): expecting_confirmation = False parsed_m = {'out': None, 'channel': 'ch', 'type': None} - assert confirmation.update_confirmation_if_url(parsed_m, + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is False
Fix change of function names
## Code Before: import pytest import linkatos.confirmation as confirmation def test_no_confirmation_with_url(): expecting_confirmation = False parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is True # to do test process_confirmation_if_yn(parsed_message, expecting_confirmation) def test_confirmation_with_url(): expecting_confirmation = True parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is True def test_confirmation_without_url(): expecting_confirmation = True parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is True def test_no_confirmation_without_url(): expecting_confirmation = False parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_confirmation_if_url(parsed_m, expecting_confirmation) is False ## Instruction: Fix change of function names ## Code After: import pytest import linkatos.confirmation as confirmation def test_no_confirmation_with_url(): expecting_confirmation = False parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True # to do test process_confirmation_if_yn(parsed_message, expecting_confirmation) def test_confirmation_with_url(): expecting_confirmation = True parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_confirmation_without_url(): expecting_confirmation = True parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_no_confirmation_without_url(): expecting_confirmation = False parsed_m = {'out': None, 'channel': 'ch', 'type': None} assert confirmation.update_if_url(parsed_m, expecting_confirmation) is False
import pytest import linkatos.confirmation as confirmation def test_no_confirmation_with_url(): expecting_confirmation = False parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} - assert confirmation.update_confirmation_if_url(parsed_m, ? ------------- + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True # to do test process_confirmation_if_yn(parsed_message, expecting_confirmation) def test_confirmation_with_url(): expecting_confirmation = True parsed_m = {'out': 'http://ex.org', 'channel': 'ch', 'type': 'url'} - assert confirmation.update_confirmation_if_url(parsed_m, ? ------------- + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_confirmation_without_url(): expecting_confirmation = True parsed_m = {'out': None, 'channel': 'ch', 'type': None} - assert confirmation.update_confirmation_if_url(parsed_m, ? ------------- + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is True def test_no_confirmation_without_url(): expecting_confirmation = False parsed_m = {'out': None, 'channel': 'ch', 'type': None} - assert confirmation.update_confirmation_if_url(parsed_m, ? ------------- + assert confirmation.update_if_url(parsed_m, expecting_confirmation) is False
c47b2d88fce9f890e7356288faf097cf4a97f0b8
simplesqlite/_logger/_logger.py
simplesqlite/_logger/_logger.py
from __future__ import absolute_import, unicode_literals import sqliteschema import tabledata from ._null_logger import NullLogger MODULE_NAME = "simplesqlite" try: from loguru import logger logger.disable(MODULE_NAME) except ImportError: logger = NullLogger() def set_logger(is_enable): if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) tabledata.set_logger(is_enable) sqliteschema.set_logger(is_enable) try: import pytablereader pytablereader.set_logger(is_enable) except ImportError: pass def set_log_level(log_level): # deprecated return
from __future__ import absolute_import, unicode_literals import sqliteschema import tabledata from ._null_logger import NullLogger MODULE_NAME = "simplesqlite" _is_enable = False try: from loguru import logger logger.disable(MODULE_NAME) except ImportError: logger = NullLogger() def set_logger(is_enable): global _is_enable if is_enable == _is_enable: return if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) tabledata.set_logger(is_enable) sqliteschema.set_logger(is_enable) try: import pytablereader pytablereader.set_logger(is_enable) except ImportError: pass def set_log_level(log_level): # deprecated return
Add check for logging state
Add check for logging state
Python
mit
thombashi/SimpleSQLite,thombashi/SimpleSQLite
from __future__ import absolute_import, unicode_literals import sqliteschema import tabledata from ._null_logger import NullLogger MODULE_NAME = "simplesqlite" + _is_enable = False try: from loguru import logger logger.disable(MODULE_NAME) except ImportError: logger = NullLogger() def set_logger(is_enable): + global _is_enable + + if is_enable == _is_enable: + return + if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) tabledata.set_logger(is_enable) sqliteschema.set_logger(is_enable) try: import pytablereader pytablereader.set_logger(is_enable) except ImportError: pass def set_log_level(log_level): # deprecated return
Add check for logging state
## Code Before: from __future__ import absolute_import, unicode_literals import sqliteschema import tabledata from ._null_logger import NullLogger MODULE_NAME = "simplesqlite" try: from loguru import logger logger.disable(MODULE_NAME) except ImportError: logger = NullLogger() def set_logger(is_enable): if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) tabledata.set_logger(is_enable) sqliteschema.set_logger(is_enable) try: import pytablereader pytablereader.set_logger(is_enable) except ImportError: pass def set_log_level(log_level): # deprecated return ## Instruction: Add check for logging state ## Code After: from __future__ import absolute_import, unicode_literals import sqliteschema import tabledata from ._null_logger import NullLogger MODULE_NAME = "simplesqlite" _is_enable = False try: from loguru import logger logger.disable(MODULE_NAME) except ImportError: logger = NullLogger() def set_logger(is_enable): global _is_enable if is_enable == _is_enable: return if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) tabledata.set_logger(is_enable) sqliteschema.set_logger(is_enable) try: import pytablereader pytablereader.set_logger(is_enable) except ImportError: pass def set_log_level(log_level): # deprecated return
from __future__ import absolute_import, unicode_literals import sqliteschema import tabledata from ._null_logger import NullLogger MODULE_NAME = "simplesqlite" + _is_enable = False try: from loguru import logger logger.disable(MODULE_NAME) except ImportError: logger = NullLogger() def set_logger(is_enable): + global _is_enable + + if is_enable == _is_enable: + return + if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) tabledata.set_logger(is_enable) sqliteschema.set_logger(is_enable) try: import pytablereader pytablereader.set_logger(is_enable) except ImportError: pass def set_log_level(log_level): # deprecated return
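The early return added here compares the requested state against the module-level `_is_enable` flag; in the usual form of this idempotence guard the flag is also updated whenever the state actually changes, so the comparison stays meaningful on later calls. A generic sketch of that pattern, independent of loguru and the simplesqlite internals:

_is_enable = False

def set_logger(is_enable):
    global _is_enable
    if is_enable == _is_enable:
        return                      # already in the requested state
    _is_enable = is_enable          # record the new state so the guard holds
    if is_enable:
        print("logging enabled")    # stand-in for logger.enable(MODULE_NAME)
    else:
        print("logging disabled")   # stand-in for logger.disable(MODULE_NAME)

set_logger(True)    # prints: logging enabled
set_logger(True)    # no output: the guard short-circuits the repeat call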
7fdbe50d113a78fd02101056b56d44d917c5571c
joins/models.py
joins/models.py
from django.db import models

# Create your models here.
class Join(models.Model):
    email = models.EmailField()
    ip_address = models.CharField(max_length=120, default='ABC')
    timestamp = models.DateTimeField(auto_now_add = True, auto_now=False)
    updated = models.DateTimeField(auto_now_add = False, auto_now=True)

    def __unicode__(self):
        return "%s" %(self.email)
from django.db import models

# Create your models here.
class Join(models.Model):
    email = models.EmailField()
    ip_address = models.CharField(max_length=120, default='ABC')
    timestamp = models.DateTimeField(auto_now_add = True, auto_now=False)
    updated = models.DateTimeField(auto_now_add = False, auto_now=True)

    def __unicode__(self):
        return "%s" %(self.email)


#To see the guide on using south, go here:
#https://github.com/codingforentrepreneurs/Guides/blob/master/using_south_in_django.md
Add South Guide, made message for it
Add South Guide, made message for it
Python
mit
codingforentrepreneurs/launch-with-code,codingforentrepreneurs/launch-with-code,krishnazure/launch-with-code,krishnazure/launch-with-code,krishnazure/launch-with-code
from django.db import models # Create your models here. class Join(models.Model): email = models.EmailField() ip_address = models.CharField(max_length=120, default='ABC') timestamp = models.DateTimeField(auto_now_add = True, auto_now=False) updated = models.DateTimeField(auto_now_add = False, auto_now=True) def __unicode__(self): return "%s" %(self.email) + #To see the guide on using south, go here: + #https://github.com/codingforentrepreneurs/Guides/blob/master/using_south_in_django.md + +
Add South Guide, made message for it
## Code Before: from django.db import models # Create your models here. class Join(models.Model): email = models.EmailField() ip_address = models.CharField(max_length=120, default='ABC') timestamp = models.DateTimeField(auto_now_add = True, auto_now=False) updated = models.DateTimeField(auto_now_add = False, auto_now=True) def __unicode__(self): return "%s" %(self.email) ## Instruction: Add South Guide, made message for it ## Code After: from django.db import models # Create your models here. class Join(models.Model): email = models.EmailField() ip_address = models.CharField(max_length=120, default='ABC') timestamp = models.DateTimeField(auto_now_add = True, auto_now=False) updated = models.DateTimeField(auto_now_add = False, auto_now=True) def __unicode__(self): return "%s" %(self.email) #To see the guide on using south, go here: #https://github.com/codingforentrepreneurs/Guides/blob/master/using_south_in_django.md
from django.db import models # Create your models here. class Join(models.Model): email = models.EmailField() ip_address = models.CharField(max_length=120, default='ABC') timestamp = models.DateTimeField(auto_now_add = True, auto_now=False) updated = models.DateTimeField(auto_now_add = False, auto_now=True) def __unicode__(self): return "%s" %(self.email) + + #To see the guide on using south, go here: + #https://github.com/codingforentrepreneurs/Guides/blob/master/using_south_in_django.md +
373ce0f89a9253065114c757d3484849349a716d
tests/data_context/test_data_context_utils.py
tests/data_context/test_data_context_utils.py
import pytest
import os

from great_expectations.data_context.util import (
    safe_mmkdir,
)


def test_safe_mmkdir(tmp_path_factory):
    project_path = str(tmp_path_factory.mktemp('empty_dir'))

    first_path = os.path.join(project_path,"first_path")

    safe_mmkdir(first_path)
    assert os.path.isdir(first_path)

    with pytest.raises(TypeError):
        safe_mmkdir(1)

    #This should trigger python 2
    second_path = os.path.join(project_path,"second_path")
    print(second_path)
    print(type(second_path))
    safe_mmkdir(os.path.dirname(second_path))
import pytest
import os
import six

from great_expectations.data_context.util import (
    safe_mmkdir,
)


def test_safe_mmkdir(tmp_path_factory):
    project_path = str(tmp_path_factory.mktemp('empty_dir'))

    first_path = os.path.join(project_path,"first_path")

    safe_mmkdir(first_path)
    assert os.path.isdir(first_path)

    with pytest.raises(TypeError):
        safe_mmkdir(1)

    #This should trigger python 2
    if six.PY2:
        with pytest.raises(TypeError) as e:
            next_project_path = tmp_path_factory.mktemp('test_safe_mmkdir__dir_b')
            safe_mmkdir(next_project_path)

        assert e.value.message == "directory must be of type str, not {'directory_type': \"<class 'pathlib2.PosixPath'>\"}"
Add test for the intended use case
Add test for the intended use case
Python
apache-2.0
great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations,great-expectations/great_expectations
import pytest import os + import six from great_expectations.data_context.util import ( safe_mmkdir, ) def test_safe_mmkdir(tmp_path_factory): project_path = str(tmp_path_factory.mktemp('empty_dir')) first_path = os.path.join(project_path,"first_path") safe_mmkdir(first_path) assert os.path.isdir(first_path) with pytest.raises(TypeError): safe_mmkdir(1) #This should trigger python 2 - second_path = os.path.join(project_path,"second_path") - print(second_path) - print(type(second_path)) - safe_mmkdir(os.path.dirname(second_path)) + if six.PY2: + with pytest.raises(TypeError) as e: + next_project_path = tmp_path_factory.mktemp('test_safe_mmkdir__dir_b') + safe_mmkdir(next_project_path) + assert e.value.message == "directory must be of type str, not {'directory_type': \"<class 'pathlib2.PosixPath'>\"}" +
Add test for the intended use case
## Code Before: import pytest import os from great_expectations.data_context.util import ( safe_mmkdir, ) def test_safe_mmkdir(tmp_path_factory): project_path = str(tmp_path_factory.mktemp('empty_dir')) first_path = os.path.join(project_path,"first_path") safe_mmkdir(first_path) assert os.path.isdir(first_path) with pytest.raises(TypeError): safe_mmkdir(1) #This should trigger python 2 second_path = os.path.join(project_path,"second_path") print(second_path) print(type(second_path)) safe_mmkdir(os.path.dirname(second_path)) ## Instruction: Add test for the intended use case ## Code After: import pytest import os import six from great_expectations.data_context.util import ( safe_mmkdir, ) def test_safe_mmkdir(tmp_path_factory): project_path = str(tmp_path_factory.mktemp('empty_dir')) first_path = os.path.join(project_path,"first_path") safe_mmkdir(first_path) assert os.path.isdir(first_path) with pytest.raises(TypeError): safe_mmkdir(1) #This should trigger python 2 if six.PY2: with pytest.raises(TypeError) as e: next_project_path = tmp_path_factory.mktemp('test_safe_mmkdir__dir_b') safe_mmkdir(next_project_path) assert e.value.message == "directory must be of type str, not {'directory_type': \"<class 'pathlib2.PosixPath'>\"}"
import pytest import os + import six from great_expectations.data_context.util import ( safe_mmkdir, ) def test_safe_mmkdir(tmp_path_factory): project_path = str(tmp_path_factory.mktemp('empty_dir')) first_path = os.path.join(project_path,"first_path") safe_mmkdir(first_path) assert os.path.isdir(first_path) with pytest.raises(TypeError): safe_mmkdir(1) #This should trigger python 2 - second_path = os.path.join(project_path,"second_path") - print(second_path) - print(type(second_path)) - safe_mmkdir(os.path.dirname(second_path)) + if six.PY2: + with pytest.raises(TypeError) as e: + next_project_path = tmp_path_factory.mktemp('test_safe_mmkdir__dir_b') + safe_mmkdir(next_project_path) + + assert e.value.message == "directory must be of type str, not {'directory_type': \"<class 'pathlib2.PosixPath'>\"}"
daaf58639148b220d6dcce13e054374a68f9b01a
testfixtures/tests/test_docs.py
testfixtures/tests/test_docs.py
import unittest
from glob import glob
from os.path import dirname,join,pardir
from doctest import DocFileSuite,REPORT_NDIFF,ELLIPSIS

options = REPORT_NDIFF|ELLIPSIS

def test_suite():
    return unittest.TestSuite((
        DocFileSuite(
            *glob(join(dirname(__file__),pardir,'docs','*.txt')),
            module_relative=False,
            optionflags=options
            ),
        ))
from glob import glob
from manuel import doctest,codeblock
from manuel.testing import TestSuite
from os.path import dirname,join,pardir
from doctest import REPORT_NDIFF,ELLIPSIS

def test_suite():
    m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS)
    m += codeblock.Manuel()
    return TestSuite(
        m,
        *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt'))
        )
Use Manuel instead of doctest.
Use Manuel instead of doctest.
Python
mit
nebulans/testfixtures,Simplistix/testfixtures
- - import unittest from glob import glob + from manuel import doctest,codeblock + from manuel.testing import TestSuite from os.path import dirname,join,pardir - from doctest import DocFileSuite,REPORT_NDIFF,ELLIPSIS + from doctest import REPORT_NDIFF,ELLIPSIS - - options = REPORT_NDIFF|ELLIPSIS def test_suite(): + m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS) + m += codeblock.Manuel() - return unittest.TestSuite(( + return TestSuite( - DocFileSuite( + m, - *glob(join(dirname(__file__),pardir,'docs','*.txt')), + *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt')) - module_relative=False, - optionflags=options - ), - )) + )
Use Manuel instead of doctest.
## Code Before: import unittest from glob import glob from os.path import dirname,join,pardir from doctest import DocFileSuite,REPORT_NDIFF,ELLIPSIS options = REPORT_NDIFF|ELLIPSIS def test_suite(): return unittest.TestSuite(( DocFileSuite( *glob(join(dirname(__file__),pardir,'docs','*.txt')), module_relative=False, optionflags=options ), )) ## Instruction: Use Manuel instead of doctest. ## Code After: from glob import glob from manuel import doctest,codeblock from manuel.testing import TestSuite from os.path import dirname,join,pardir from doctest import REPORT_NDIFF,ELLIPSIS def test_suite(): m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS) m += codeblock.Manuel() return TestSuite( m, *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt')) )
- - import unittest from glob import glob + from manuel import doctest,codeblock + from manuel.testing import TestSuite from os.path import dirname,join,pardir - from doctest import DocFileSuite,REPORT_NDIFF,ELLIPSIS ? ------------- + from doctest import REPORT_NDIFF,ELLIPSIS - - options = REPORT_NDIFF|ELLIPSIS def test_suite(): + m = doctest.Manuel(optionflags=REPORT_NDIFF|ELLIPSIS) + m += codeblock.Manuel() - return unittest.TestSuite(( ? --------- - + return TestSuite( - DocFileSuite( + m, - *glob(join(dirname(__file__),pardir,'docs','*.txt')), ? -------- - + *glob(join(dirname(__file__),pardir,pardir,'docs','*.txt')) ? +++++++ - module_relative=False, - optionflags=options - ), - )) ? - + )
7a936665eff8a6a8f6889334ad2238cbfcded18b
member.py
member.py
import requests
from credentials import label_id
from gmailauth import refresh

access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}


def list_messages(headers):
    params = {'labelIds': label_id, 'q': 'newer_than:3d'}
    r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages', headers=headers, params=params)
    j = r.json()
    messages = []
    if 'messages' in j:
        messages.extend(j['messages'])
    # return messages
    message_ids = []
    for item in messages:
        message_ids.append(item['id'])
    return message_ids

print(list_messages(headers))


def get_message(headers, identity):
    params = {'id': identity, format: 'metadata'}
    r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id', headers=headers, params=params)
    j = r.json()
    print(r.status_code, r.reason)
    h = j['payload']
    subject = ''
    for header in h['headers']:
        if header['name'] == 'Subject':
            subject = header['value']
            break
    print(subject)

for item in list_messages(headers):
    get_message(headers, item)

# get_message(headers, list_messages(headers))
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh

# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}


def list_messages(headers):
    params = {'labelIds': label_id, 'q': 'newer_than:2d'}
    r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages', headers=headers, params=params)
    j = r.json()
    messages = []
    if 'messages' in j:
        messages.extend(j['messages'])
    # return messages
    message_ids = []
    for item in messages:
        message_ids.append(item['id'])
    return message_ids


def get_message(headers, identity):
    params = {'id': identity, 'format': 'raw'}
    r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id', headers=headers, params=params)
    j = r.json()
    raw = j['raw']
    d = urlsafe_b64decode(raw)
    p = d.decode()
    s = p.find('https')
    l = len(p)
    print(p[s:l])
    print('----------')
    return(p[s:l])

# for item in list_messages(headers):
#     get_message(headers, item)
Return the order details URL from email body.
Return the order details URL from email body. There is currently no Agile API method that will return the order details for an activity so the URL from the email must be used in conjunction with a web scraper to get the relevant details.
Python
mit
deadlyraptor/reels
import requests + from base64 import urlsafe_b64decode - from credentials import label_id + from credentials import label_id, url1, url2 from gmailauth import refresh - access_token = refresh() + # access_token = refresh() headers = {'Authorization': ('Bearer ' + access_token)} def list_messages(headers): - params = {'labelIds': label_id, 'q': 'newer_than:3d'} + params = {'labelIds': label_id, 'q': 'newer_than:2d'} r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages', headers=headers, params=params) j = r.json() messages = [] if 'messages' in j: messages.extend(j['messages']) # return messages message_ids = [] for item in messages: message_ids.append(item['id']) return message_ids - print(list_messages(headers)) - def get_message(headers, identity): - params = {'id': identity, format: 'metadata'} + params = {'id': identity, 'format': 'raw'} r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id', headers=headers, params=params) j = r.json() - print(r.status_code, r.reason) - h = j['payload'] - subject = '' - for header in h['headers']: - if header['name'] == 'Subject': - subject = header['value'] - break - print(subject) + raw = j['raw'] + d = urlsafe_b64decode(raw) + p = d.decode() + s = p.find('https') + l = len(p) + print(p[s:l]) + print('----------') + return(p[s:l]) - for item in list_messages(headers): + # for item in list_messages(headers): - get_message(headers, item) + # get_message(headers, item) - # get_message(headers, list_messages(headers)) -
Return the order details URL from email body.
## Code Before: import requests from credentials import label_id from gmailauth import refresh access_token = refresh() headers = {'Authorization': ('Bearer ' + access_token)} def list_messages(headers): params = {'labelIds': label_id, 'q': 'newer_than:3d'} r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages', headers=headers, params=params) j = r.json() messages = [] if 'messages' in j: messages.extend(j['messages']) # return messages message_ids = [] for item in messages: message_ids.append(item['id']) return message_ids print(list_messages(headers)) def get_message(headers, identity): params = {'id': identity, format: 'metadata'} r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id', headers=headers, params=params) j = r.json() print(r.status_code, r.reason) h = j['payload'] subject = '' for header in h['headers']: if header['name'] == 'Subject': subject = header['value'] break print(subject) for item in list_messages(headers): get_message(headers, item) # get_message(headers, list_messages(headers)) ## Instruction: Return the order details URL from email body. ## Code After: import requests from base64 import urlsafe_b64decode from credentials import label_id, url1, url2 from gmailauth import refresh # access_token = refresh() headers = {'Authorization': ('Bearer ' + access_token)} def list_messages(headers): params = {'labelIds': label_id, 'q': 'newer_than:2d'} r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages', headers=headers, params=params) j = r.json() messages = [] if 'messages' in j: messages.extend(j['messages']) # return messages message_ids = [] for item in messages: message_ids.append(item['id']) return message_ids def get_message(headers, identity): params = {'id': identity, 'format': 'raw'} r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id', headers=headers, params=params) j = r.json() raw = j['raw'] d = urlsafe_b64decode(raw) p = d.decode() s = p.find('https') l = len(p) print(p[s:l]) print('----------') return(p[s:l]) # for item in list_messages(headers): # get_message(headers, item)
import requests + from base64 import urlsafe_b64decode - from credentials import label_id + from credentials import label_id, url1, url2 ? ++++++++++++ from gmailauth import refresh - access_token = refresh() + # access_token = refresh() ? ++ headers = {'Authorization': ('Bearer ' + access_token)} def list_messages(headers): - params = {'labelIds': label_id, 'q': 'newer_than:3d'} ? ^ + params = {'labelIds': label_id, 'q': 'newer_than:2d'} ? ^ r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages', headers=headers, params=params) j = r.json() messages = [] if 'messages' in j: messages.extend(j['messages']) # return messages message_ids = [] for item in messages: message_ids.append(item['id']) return message_ids - print(list_messages(headers)) - def get_message(headers, identity): - params = {'id': identity, format: 'metadata'} ? ^^^ ^^^^ + params = {'id': identity, 'format': 'raw'} ? + + ^ ^ r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id', headers=headers, params=params) j = r.json() - print(r.status_code, r.reason) - h = j['payload'] - subject = '' - for header in h['headers']: - if header['name'] == 'Subject': - subject = header['value'] - break - print(subject) + raw = j['raw'] + d = urlsafe_b64decode(raw) + p = d.decode() + s = p.find('https') + l = len(p) + print(p[s:l]) + print('----------') + return(p[s:l]) - for item in list_messages(headers): + # for item in list_messages(headers): ? ++ - get_message(headers, item) + # get_message(headers, item) ? ++ - - # get_message(headers, list_messages(headers))
fac280a022c8728f14bbe1194cf74af761b7ec3f
vfp2py/__main__.py
vfp2py/__main__.py
import argparse

import vfp2py


def parse_args(argv=None):
    parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
    parser.add_argument("infile", help="file to convert", type=str)
    parser.add_argument("outfile", help="file to output to", type=str)
    parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
    return parser.parse_args(argv)


def main(argv=None):
    args = parse_args(argv)
    global SEARCH_PATH
    SEARCH_PATH = args.search
    vfp2py.convert_file(args.infile, args.outfile)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
import argparse

import vfp2py


def parse_args(argv=None):
    parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
    parser.add_argument("infile", help="file to convert", type=str)
    parser.add_argument("outfile", help="file to output to", type=str)
    parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
    return parser.parse_args(argv)


def main(argv=None):
    args = parse_args(argv)
    vfp2py.SEARCH_PATH += args.search
    vfp2py.convert_file(args.infile, args.outfile)


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
Fix search paths not being added from arguments.
Fix search paths not being added from arguments.
Python
mit
mwisslead/vfp2py,mwisslead/vfp2py
import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) - global SEARCH_PATH - SEARCH_PATH = args.search + vfp2py.SEARCH_PATH += args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
Fix search paths not being added from arguments.
## Code Before: import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) global SEARCH_PATH SEARCH_PATH = args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass ## Instruction: Fix search paths not being added from arguments. ## Code After: import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) vfp2py.SEARCH_PATH += args.search vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
import argparse import vfp2py def parse_args(argv=None): parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python') parser.add_argument("infile", help="file to convert", type=str) parser.add_argument("outfile", help="file to output to", type=str) parser.add_argument("search", help="directories to search for included files", type=str, nargs='*') return parser.parse_args(argv) def main(argv=None): args = parse_args(argv) - global SEARCH_PATH - SEARCH_PATH = args.search + vfp2py.SEARCH_PATH += args.search ? +++++++ + vfp2py.convert_file(args.infile, args.outfile) if __name__ == '__main__': try: main() except KeyboardInterrupt: pass
19c46fd57e04a026c6e52e1be9ba265a82d651f1
walletname/__init__.py
walletname/__init__.py
__author__ = 'mdavid'

import json
import re
import requests

from blockexplorer.settings import WNS_URL_BASE

WALLET_NAME_RE = re.compile('^([0-9a-z][0-9a-z\-]*\.)+[a-z]{2,}$')
TIMEOUT_IN_SECONDS = 20


def is_valid_wallet_name(string):
    return WALLET_NAME_RE.match(string)


def lookup_wallet_name(wallet_name, currency='btc', wns_base=WNS_URL_BASE):
    assert is_valid_wallet_name(wallet_name)

    r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS)
    rdict = json.loads(r.text)
    if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency:
        return rdict.get('wallet_address')

    return None
__author__ = 'mdavid'

import json
import re
import requests

from blockexplorer.settings import WNS_URL_BASE

WALLET_NAME_RE = re.compile('^([0-9a-z][0-9a-z\-]*\.)+[a-z]{2,}$')
TIMEOUT_IN_SECONDS = 20


def is_valid_wallet_name(string):
    return WALLET_NAME_RE.match(string)


def lookup_wallet_name(wallet_name, currency='btc', wns_base=WNS_URL_BASE):
    assert is_valid_wallet_name(wallet_name)

    try:
        r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS)
        rdict = json.loads(r.text)
        if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency:
            return rdict.get('wallet_address')
    except:
        pass

    return None
Add try/except block around lookup in lookup_wallet_name function
Add try/except block around lookup in lookup_wallet_name function
Python
apache-2.0
ychaim/explorer,blockcypher/explorer,blockcypher/explorer,ychaim/explorer,ychaim/explorer,blockcypher/explorer
__author__ = 'mdavid' import json import re import requests from blockexplorer.settings import WNS_URL_BASE WALLET_NAME_RE = re.compile('^([0-9a-z][0-9a-z\-]*\.)+[a-z]{2,}$') TIMEOUT_IN_SECONDS = 20 def is_valid_wallet_name(string): return WALLET_NAME_RE.match(string) def lookup_wallet_name(wallet_name, currency='btc', wns_base=WNS_URL_BASE): assert is_valid_wallet_name(wallet_name) + try: - r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS) + r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS) - rdict = json.loads(r.text) + rdict = json.loads(r.text) - if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency: + if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency: - return rdict.get('wallet_address') + return rdict.get('wallet_address') + except: + pass return None
Add try/except block around lookup in lookup_wallet_name function
## Code Before: __author__ = 'mdavid' import json import re import requests from blockexplorer.settings import WNS_URL_BASE WALLET_NAME_RE = re.compile('^([0-9a-z][0-9a-z\-]*\.)+[a-z]{2,}$') TIMEOUT_IN_SECONDS = 20 def is_valid_wallet_name(string): return WALLET_NAME_RE.match(string) def lookup_wallet_name(wallet_name, currency='btc', wns_base=WNS_URL_BASE): assert is_valid_wallet_name(wallet_name) r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS) rdict = json.loads(r.text) if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency: return rdict.get('wallet_address') return None ## Instruction: Add try/except block around lookup in lookup_wallet_name function ## Code After: __author__ = 'mdavid' import json import re import requests from blockexplorer.settings import WNS_URL_BASE WALLET_NAME_RE = re.compile('^([0-9a-z][0-9a-z\-]*\.)+[a-z]{2,}$') TIMEOUT_IN_SECONDS = 20 def is_valid_wallet_name(string): return WALLET_NAME_RE.match(string) def lookup_wallet_name(wallet_name, currency='btc', wns_base=WNS_URL_BASE): assert is_valid_wallet_name(wallet_name) try: r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS) rdict = json.loads(r.text) if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency: return rdict.get('wallet_address') except: pass return None
__author__ = 'mdavid' import json import re import requests from blockexplorer.settings import WNS_URL_BASE WALLET_NAME_RE = re.compile('^([0-9a-z][0-9a-z\-]*\.)+[a-z]{2,}$') TIMEOUT_IN_SECONDS = 20 def is_valid_wallet_name(string): return WALLET_NAME_RE.match(string) def lookup_wallet_name(wallet_name, currency='btc', wns_base=WNS_URL_BASE): assert is_valid_wallet_name(wallet_name) + try: - r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS) + r = requests.get('%s/%s/%s' % (wns_base, wallet_name, currency), verify=True, timeout=TIMEOUT_IN_SECONDS) ? ++++ - rdict = json.loads(r.text) + rdict = json.loads(r.text) ? ++++ - if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency: + if rdict.get('success', False) and rdict.get('wallet_name','') == wallet_name and rdict.get('currency','') == currency: ? ++++ - return rdict.get('wallet_address') + return rdict.get('wallet_address') ? ++++ + except: + pass return None
099a0b045548d5a93707a9ef99bece2578ed50ea
user_voting/models.py
user_voting/models.py
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models

from user_voting.managers import VoteManager

SCORES = (
    (u'+1', +1),
    (u'-1', -1),
    (u'?', 0),
)


class Vote(models.Model):
    """
    A vote on an object by a User.
    """
    user = models.ForeignKey(User)
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    object = generic.GenericForeignKey('content_type', 'object_id')
    score = models.SmallIntegerField()

    objects = VoteManager()

    class Meta:
        db_table = 'user_votes'
        # One vote per user per object
        unique_together = (('user', 'content_type', 'object_id'),)

    def __unicode__(self):
        return u'%s: score %d by %s' % (self.object, self.score, self.user)
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from django.db import models

from user_voting.managers import VoteManager

SCORES = (
    (u'+1', +1),
    (u'-1', -1),
    (u'?', 0),
)


class Vote(models.Model):
    """
    A vote on an object by a User.
    """
    user = models.ForeignKey(User)
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    object = generic.GenericForeignKey('content_type', 'object_id')
    score = models.SmallIntegerField()
    date = models.DateTimeField(auto_now=True)

    objects = VoteManager()

    class Meta:
        db_table = 'user_votes'
        # One vote per user per object
        unique_together = (('user', 'content_type', 'object_id'),)

    def __unicode__(self):
        return u'%s: score %d by %s' % (self.object, self.score, self.user)
Add date field for timestamps
user_voting: Add date field for timestamps
Python
agpl-3.0
kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu
from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.contrib.auth.models import User from django.db import models from user_voting.managers import VoteManager SCORES = ( (u'+1', +1), (u'-1', -1), (u'?', 0), ) class Vote(models.Model): """ A vote on an object by a User. """ user = models.ForeignKey(User) content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() object = generic.GenericForeignKey('content_type', 'object_id') score = models.SmallIntegerField() + date = models.DateTimeField(auto_now=True) objects = VoteManager() class Meta: db_table = 'user_votes' # One vote per user per object unique_together = (('user', 'content_type', 'object_id'),) def __unicode__(self): return u'%s: score %d by %s' % (self.object, self.score, self.user)
Add date field for timestamps
## Code Before: from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.contrib.auth.models import User from django.db import models from user_voting.managers import VoteManager SCORES = ( (u'+1', +1), (u'-1', -1), (u'?', 0), ) class Vote(models.Model): """ A vote on an object by a User. """ user = models.ForeignKey(User) content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() object = generic.GenericForeignKey('content_type', 'object_id') score = models.SmallIntegerField() objects = VoteManager() class Meta: db_table = 'user_votes' # One vote per user per object unique_together = (('user', 'content_type', 'object_id'),) def __unicode__(self): return u'%s: score %d by %s' % (self.object, self.score, self.user) ## Instruction: Add date field for timestamps ## Code After: from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.contrib.auth.models import User from django.db import models from user_voting.managers import VoteManager SCORES = ( (u'+1', +1), (u'-1', -1), (u'?', 0), ) class Vote(models.Model): """ A vote on an object by a User. """ user = models.ForeignKey(User) content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() object = generic.GenericForeignKey('content_type', 'object_id') score = models.SmallIntegerField() date = models.DateTimeField(auto_now=True) objects = VoteManager() class Meta: db_table = 'user_votes' # One vote per user per object unique_together = (('user', 'content_type', 'object_id'),) def __unicode__(self): return u'%s: score %d by %s' % (self.object, self.score, self.user)
from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType from django.contrib.auth.models import User from django.db import models from user_voting.managers import VoteManager SCORES = ( (u'+1', +1), (u'-1', -1), (u'?', 0), ) class Vote(models.Model): """ A vote on an object by a User. """ user = models.ForeignKey(User) content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() object = generic.GenericForeignKey('content_type', 'object_id') score = models.SmallIntegerField() + date = models.DateTimeField(auto_now=True) objects = VoteManager() class Meta: db_table = 'user_votes' # One vote per user per object unique_together = (('user', 'content_type', 'object_id'),) def __unicode__(self): return u'%s: score %d by %s' % (self.object, self.score, self.user)
0ee59d04cb2cbe93a3f4f87a34725fbcd1a66fc0
core/Reader.py
core/Reader.py
from io import StringIO
from collections import deque


class StreamReader:
    def __init__(self, *args, stream_class=StringIO, **kwargs):
        self.streamClass = stream_class
        self.args = args
        self.kwargs = kwargs

    def read(self, parsing_pipeline):
        parsing_pipeline.reset()
        stream = self.streamClass(*self.args, **self.kwargs)
        min_position = parsing_pipeline.get_min_position()
        max_position = parsing_pipeline.get_max_position()
        length = max_position - min_position + 1
        current_position = -min_position
        ar_index = list()
        element = deque(stream.read(length))
        while True:
            result = parsing_pipeline.check(element, ref_position=-min_position)
            if result is not None and result[0]:
                ar_index.append((current_position, element[-min_position]))
            next_character = stream.read(1)
            current_position += 1
            if next_character and result is not None:
                element.popleft()
                element.append(next_character)
            else:
                break

        stream.close()
        return ar_index
from io import StringIO
from collections import deque


class StreamReader:
    def __init__(self, *args, stream_class=StringIO, **kwargs):
        self.streamClass = stream_class
        self.args = args
        self.kwargs = kwargs

    def read(self, parsing_pipeline):
        parsing_pipeline.reset()
        min_position = parsing_pipeline.get_min_position()
        max_position = parsing_pipeline.get_max_position()
        length = max_position - min_position + 1
        stream = self.streamClass(*self.args, **self.kwargs)
        current_position = -min_position
        ar_index = list()
        element = deque(stream.read(length))
        if len(element) == length:
            while True:
                result = parsing_pipeline.check(element, ref_position=-min_position)
                if result is not None and result[0]:
                    ar_index.append((current_position, element[-min_position]))
                next_character = stream.read(1)
                current_position += 1
                if next_character and result is not None:
                    element.popleft()
                    element.append(next_character)
                else:
                    break
            stream.close()
            return ar_index
        else:
            stream.close()
            raise ValueError("Not enough characters to parse : " + str(len(element)))
Add not enough characters condition
Add not enough characters condition
Python
mit
JCH222/matriochkas
from io import StringIO from collections import deque class StreamReader: def __init__(self, *args, stream_class=StringIO, **kwargs): self.streamClass = stream_class self.args = args self.kwargs = kwargs def read(self, parsing_pipeline): parsing_pipeline.reset() - stream = self.streamClass(*self.args, **self.kwargs) min_position = parsing_pipeline.get_min_position() max_position = parsing_pipeline.get_max_position() length = max_position - min_position + 1 + stream = self.streamClass(*self.args, **self.kwargs) current_position = -min_position ar_index = list() element = deque(stream.read(length)) + if len(element) == length: + while True: + result = parsing_pipeline.check(element, ref_position=-min_position) + if result is not None and result[0]: + ar_index.append((current_position, element[-min_position])) + next_character = stream.read(1) + current_position += 1 + if next_character and result is not None: + element.popleft() + element.append(next_character) + else: + break + stream.close() + return ar_index - while True: - result = parsing_pipeline.check(element, ref_position=-min_position) - if result is not None and result[0]: - ar_index.append((current_position, element[-min_position])) - next_character = stream.read(1) - current_position += 1 - if next_character and result is not None: - element.popleft() - element.append(next_character) - else: + else: - break + stream.close() + raise ValueError("Not enough characters to parse : " + str(len(element))) - stream.close() - return ar_index -
Add not enough characters condition
## Code Before: from io import StringIO from collections import deque class StreamReader: def __init__(self, *args, stream_class=StringIO, **kwargs): self.streamClass = stream_class self.args = args self.kwargs = kwargs def read(self, parsing_pipeline): parsing_pipeline.reset() stream = self.streamClass(*self.args, **self.kwargs) min_position = parsing_pipeline.get_min_position() max_position = parsing_pipeline.get_max_position() length = max_position - min_position + 1 current_position = -min_position ar_index = list() element = deque(stream.read(length)) while True: result = parsing_pipeline.check(element, ref_position=-min_position) if result is not None and result[0]: ar_index.append((current_position, element[-min_position])) next_character = stream.read(1) current_position += 1 if next_character and result is not None: element.popleft() element.append(next_character) else: break stream.close() return ar_index ## Instruction: Add not enough characters condition ## Code After: from io import StringIO from collections import deque class StreamReader: def __init__(self, *args, stream_class=StringIO, **kwargs): self.streamClass = stream_class self.args = args self.kwargs = kwargs def read(self, parsing_pipeline): parsing_pipeline.reset() min_position = parsing_pipeline.get_min_position() max_position = parsing_pipeline.get_max_position() length = max_position - min_position + 1 stream = self.streamClass(*self.args, **self.kwargs) current_position = -min_position ar_index = list() element = deque(stream.read(length)) if len(element) == length: while True: result = parsing_pipeline.check(element, ref_position=-min_position) if result is not None and result[0]: ar_index.append((current_position, element[-min_position])) next_character = stream.read(1) current_position += 1 if next_character and result is not None: element.popleft() element.append(next_character) else: break stream.close() return ar_index else: stream.close() raise ValueError("Not enough characters to parse : " + str(len(element)))
from io import StringIO from collections import deque class StreamReader: def __init__(self, *args, stream_class=StringIO, **kwargs): self.streamClass = stream_class self.args = args self.kwargs = kwargs def read(self, parsing_pipeline): parsing_pipeline.reset() - stream = self.streamClass(*self.args, **self.kwargs) min_position = parsing_pipeline.get_min_position() max_position = parsing_pipeline.get_max_position() length = max_position - min_position + 1 + stream = self.streamClass(*self.args, **self.kwargs) current_position = -min_position ar_index = list() element = deque(stream.read(length)) + if len(element) == length: + while True: + result = parsing_pipeline.check(element, ref_position=-min_position) + if result is not None and result[0]: + ar_index.append((current_position, element[-min_position])) + next_character = stream.read(1) + current_position += 1 + if next_character and result is not None: + element.popleft() + element.append(next_character) + else: + break - while True: - result = parsing_pipeline.check(element, ref_position=-min_position) - if result is not None and result[0]: - ar_index.append((current_position, element[-min_position])) - next_character = stream.read(1) - current_position += 1 - if next_character and result is not None: - element.popleft() - element.append(next_character) - else: - break - - stream.close() + stream.close() ? ++++ - return ar_index + return ar_index ? ++++ + else: + stream.close() + raise ValueError("Not enough characters to parse : " + str(len(element)))
b57be89c94d050dd1e5f4279f91170982b00cc2e
polyaxon/clusters/management/commands/clean_experiments.py
polyaxon/clusters/management/commands/clean_experiments.py
from django.core.management import BaseCommand

from experiments.models import Experiment
from spawner import scheduler
from spawner.utils.constants import ExperimentLifeCycle


class Command(BaseCommand):
    def handle(self, *args, **options):
        for experiment in Experiment.objects.filter(
                experiment_status__status__in=ExperimentLifeCycle.RUNNING_STATUS):
            scheduler.stop_experiment(experiment)
from django.core.management import BaseCommand

from experiments.models import Experiment
from spawner import scheduler
from spawner.utils.constants import ExperimentLifeCycle


class Command(BaseCommand):
    def handle(self, *args, **options):
        for experiment in Experiment.objects.filter(
                experiment_status__status__in=ExperimentLifeCycle.RUNNING_STATUS):
            scheduler.stop_experiment(experiment, update_status=True)
Update status when stopping experiments
Update status when stopping experiments
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
from django.core.management import BaseCommand from experiments.models import Experiment from spawner import scheduler from spawner.utils.constants import ExperimentLifeCycle class Command(BaseCommand): def handle(self, *args, **options): for experiment in Experiment.objects.filter( experiment_status__status__in=ExperimentLifeCycle.RUNNING_STATUS): - scheduler.stop_experiment(experiment) + scheduler.stop_experiment(experiment, update_status=True)
Update status when stopping experiments
## Code Before: from django.core.management import BaseCommand from experiments.models import Experiment from spawner import scheduler from spawner.utils.constants import ExperimentLifeCycle class Command(BaseCommand): def handle(self, *args, **options): for experiment in Experiment.objects.filter( experiment_status__status__in=ExperimentLifeCycle.RUNNING_STATUS): scheduler.stop_experiment(experiment) ## Instruction: Update status when stopping experiments ## Code After: from django.core.management import BaseCommand from experiments.models import Experiment from spawner import scheduler from spawner.utils.constants import ExperimentLifeCycle class Command(BaseCommand): def handle(self, *args, **options): for experiment in Experiment.objects.filter( experiment_status__status__in=ExperimentLifeCycle.RUNNING_STATUS): scheduler.stop_experiment(experiment, update_status=True)
from django.core.management import BaseCommand from experiments.models import Experiment from spawner import scheduler from spawner.utils.constants import ExperimentLifeCycle class Command(BaseCommand): def handle(self, *args, **options): for experiment in Experiment.objects.filter( experiment_status__status__in=ExperimentLifeCycle.RUNNING_STATUS): - scheduler.stop_experiment(experiment) + scheduler.stop_experiment(experiment, update_status=True) ? ++++++++++++++++++++
4d01eb0c1b11680d463d4fcb0888fac4ab6c45c8
panoptes/utils/data.py
panoptes/utils/data.py
import os
import argparse

from astropy.utils import data
from astroplan import download_IERS_A


def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))):
    download_IERS_A()

    for i in range(4214, 4219):
        fn = 'index-{}.fits'.format(i)
        dest = "{}/{}".format(data_folder, fn)

        if not os.path.exists(dest):
            url = "http://data.astrometry.net/4200/{}".format(fn)
            df = data.download_file(url)
            try:
                os.rename(df, dest)
            except OSError as e:
                print("Problem saving. (Maybe permissions?): {}".format(e))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--folder', help='Folder to place astrometry data')

    args = parser.parse_args()
    if not os.path.exists(args.folder):
        print("{} does not exist.".format(args.folder))

    download_all_files(data_folder=args.folder)
import os
import shutil
import argparse

from astropy.utils import data
from astroplan import download_IERS_A


def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))):
    download_IERS_A()

    for i in range(4214, 4219):
        fn = 'index-{}.fits'.format(i)
        dest = "{}/{}".format(data_folder, fn)

        if not os.path.exists(dest):
            url = "http://data.astrometry.net/4200/{}".format(fn)
            df = data.download_file(url)
            try:
                shutil.move(df, dest)
            except OSError as e:
                print("Problem saving. (Maybe permissions?): {}".format(e))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--folder', help='Folder to place astrometry data')

    args = parser.parse_args()
    if not os.path.exists(args.folder):
        print("{} does not exist.".format(args.folder))

    download_all_files(data_folder=args.folder)
Use shutil instead of `os.rename`
Use shutil instead of `os.rename`
Python
mit
panoptes/POCS,AstroHuntsman/POCS,joshwalawender/POCS,AstroHuntsman/POCS,joshwalawender/POCS,AstroHuntsman/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,joshwalawender/POCS,panoptes/POCS
import os + import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: - os.rename(df, dest) + shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
Use shutil instead of `os.rename`
## Code Before: import os import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: os.rename(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder) ## Instruction: Use shutil instead of `os.rename` ## Code After: import os import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: shutil.move(df, dest) except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
import os + import shutil import argparse from astropy.utils import data from astroplan import download_IERS_A def download_all_files(data_folder="{}/astrometry/data".format(os.getenv('PANDIR'))): download_IERS_A() for i in range(4214, 4219): fn = 'index-{}.fits'.format(i) dest = "{}/{}".format(data_folder, fn) if not os.path.exists(dest): url = "http://data.astrometry.net/4200/{}".format(fn) df = data.download_file(url) try: - os.rename(df, dest) ? ^^^^^^^ + shutil.move(df, dest) ? ++++++++ ^ except OSError as e: print("Problem saving. (Maybe permissions?): {}".format(e)) if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('--folder', help='Folder to place astrometry data') args = parser.parse_args() if not os.path.exists(args.folder): print("{} does not exist.".format(args.folder)) download_all_files(data_folder=args.folder)
52e5c8ebb74622d2cb2a378f98563cb615115a21
uptick/tools.py
uptick/tools.py
import click

from .decorators import online
from .main import main, config
from .ui import print_table, print_tx


@main.group()
def tools():
    """ Further tools
    """
    pass


@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
    """ Return keys for cloudlogin
    """
    from bitsharesbase.account import PasswordKey

    password = click.prompt("Passphrase", hide_input=True).strip()
    t = [["role", "wif", "pubkey", "accounts"]]
    for role in ["owner", "active", "memo"]:
        wif = PasswordKey(account, password, role=role)
        pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])

        t.append(
            [
                role,
                str(wif.get_private_key()),
                pubkey,
                ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
            ]
        )

    print_table(t)
import click

from .decorators import online
from .main import main, config
from .ui import print_table, print_tx


@main.group()
def tools():
    """ Further tools
    """
    pass


@tools.command()
@click.pass_context
@online
@click.argument("account")
def getcloudloginkey(ctx, account):
    """ Return keys for cloudlogin
    """
    from bitsharesbase.account import PasswordKey

    password = click.prompt("Passphrase", hide_input=True).strip()
    t = [["role", "wif", "pubkey", "accounts"]]
    for role in ["owner", "active", "memo"]:
        wif = PasswordKey(account, password, role=role)
        pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])

        t.append(
            [
                role,
                str(wif.get_private_key()),
                pubkey,
                ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
            ]
        )

    print_table(t)


@tools.command()
@click.pass_context
@online
@click.option("--limit", default=10, type=int)
def getbrainkeys(ctx, limit):
    """ Return keys for cloudlogin
    """
    from bitsharesbase.account import BrainKey

    password = click.prompt("Passphrase", hide_input=True).strip()
    t = [["index", "wif", "pubkey", "accounts"]]
    wif = BrainKey(password)
    for i in range(limit):
        pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"])

        t.append(
            [
                i,
                str(wif.get_private_key()),
                pubkey,
                ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "",
            ]
        )
        next(wif)

    print_table(t)
Allow to manually generate brainkeys
Allow to manually generate brainkeys
Python
mit
xeroc/uptick
import click from .decorators import online from .main import main, config from .ui import print_table, print_tx @main.group() def tools(): """ Further tools """ pass @tools.command() @click.pass_context @online @click.argument("account") def getcloudloginkey(ctx, account): """ Return keys for cloudlogin """ from bitsharesbase.account import PasswordKey password = click.prompt("Passphrase", hide_input=True).strip() t = [["role", "wif", "pubkey", "accounts"]] for role in ["owner", "active", "memo"]: wif = PasswordKey(account, password, role=role) pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) t.append( [ role, str(wif.get_private_key()), pubkey, ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", ] ) print_table(t) + + @tools.command() + @click.pass_context + @online + @click.option("--limit", default=10, type=int) + def getbrainkeys(ctx, limit): + """ Return keys for cloudlogin + """ + from bitsharesbase.account import BrainKey + + password = click.prompt("Passphrase", hide_input=True).strip() + t = [["index", "wif", "pubkey", "accounts"]] + wif = BrainKey(password) + for i in range(limit): + pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) + + t.append( + [ + i, + str(wif.get_private_key()), + pubkey, + ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", + ] + ) + next(wif) + + print_table(t) +
Allow to manually generate brainkeys
## Code Before: import click from .decorators import online from .main import main, config from .ui import print_table, print_tx @main.group() def tools(): """ Further tools """ pass @tools.command() @click.pass_context @online @click.argument("account") def getcloudloginkey(ctx, account): """ Return keys for cloudlogin """ from bitsharesbase.account import PasswordKey password = click.prompt("Passphrase", hide_input=True).strip() t = [["role", "wif", "pubkey", "accounts"]] for role in ["owner", "active", "memo"]: wif = PasswordKey(account, password, role=role) pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) t.append( [ role, str(wif.get_private_key()), pubkey, ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", ] ) print_table(t) ## Instruction: Allow to manually generate brainkeys ## Code After: import click from .decorators import online from .main import main, config from .ui import print_table, print_tx @main.group() def tools(): """ Further tools """ pass @tools.command() @click.pass_context @online @click.argument("account") def getcloudloginkey(ctx, account): """ Return keys for cloudlogin """ from bitsharesbase.account import PasswordKey password = click.prompt("Passphrase", hide_input=True).strip() t = [["role", "wif", "pubkey", "accounts"]] for role in ["owner", "active", "memo"]: wif = PasswordKey(account, password, role=role) pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) t.append( [ role, str(wif.get_private_key()), pubkey, ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", ] ) print_table(t) @tools.command() @click.pass_context @online @click.option("--limit", default=10, type=int) def getbrainkeys(ctx, limit): """ Return keys for cloudlogin """ from bitsharesbase.account import BrainKey password = click.prompt("Passphrase", hide_input=True).strip() t = [["index", "wif", "pubkey", "accounts"]] wif = BrainKey(password) for i in range(limit): pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) t.append( [ i, str(wif.get_private_key()), pubkey, ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", ] ) next(wif) print_table(t)
import click from .decorators import online from .main import main, config from .ui import print_table, print_tx @main.group() def tools(): """ Further tools """ pass @tools.command() @click.pass_context @online @click.argument("account") def getcloudloginkey(ctx, account): """ Return keys for cloudlogin """ from bitsharesbase.account import PasswordKey password = click.prompt("Passphrase", hide_input=True).strip() t = [["role", "wif", "pubkey", "accounts"]] for role in ["owner", "active", "memo"]: wif = PasswordKey(account, password, role=role) pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) t.append( [ role, str(wif.get_private_key()), pubkey, ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", ] ) print_table(t) + + + @tools.command() + @click.pass_context + @online + @click.option("--limit", default=10, type=int) + def getbrainkeys(ctx, limit): + """ Return keys for cloudlogin + """ + from bitsharesbase.account import BrainKey + + password = click.prompt("Passphrase", hide_input=True).strip() + t = [["index", "wif", "pubkey", "accounts"]] + wif = BrainKey(password) + for i in range(limit): + pubkey = format(wif.get_public_key(), ctx.bitshares.rpc.chain_params["prefix"]) + + t.append( + [ + i, + str(wif.get_private_key()), + pubkey, + ctx.bitshares.wallet.getAccountFromPublicKey(pubkey) or "", + ] + ) + next(wif) + + print_table(t)
a9892cc1fcb7d2911f6afba52a06a6f1c1ed9b25
tests/test_wrap_modes.py
tests/test_wrap_modes.py
from hypothesis_auto import auto_pytest_magic

from isort import wrap_modes

auto_pytest_magic(wrap_modes.grid, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.vertical, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.hanging_indent, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.vertical_hanging_indent, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.vertical_grid, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.vertical_grid_grouped, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.vertical_grid_grouped_no_comma, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(wrap_modes.noqa, auto_allow_exceptions_=(ValueError,))
auto_pytest_magic(
    wrap_modes.vertical_prefix_from_module_import, auto_allow_exceptions_=(ValueError,)
)
from hypothesis_auto import auto_pytest_magic from isort import wrap_modes auto_pytest_magic(wrap_modes.grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped_no_comma, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.noqa, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic( wrap_modes.vertical_prefix_from_module_import, auto_allow_exceptions_=(ValueError,) ) auto_pytest_magic(wrap_modes.vertical_hanging_indent_bracket, auto_allow_exceptions_=(ValueError,)) def test_wrap_mode_interface(): assert ( wrap_modes._wrap_mode_interface("statement", [], "", "", 80, [], "", "", True, True) == "" )
Increase wrap mode test coverage
Increase wrap mode test coverage
Python
mit
PyCQA/isort,PyCQA/isort
from hypothesis_auto import auto_pytest_magic from isort import wrap_modes auto_pytest_magic(wrap_modes.grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped_no_comma, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.noqa, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic( wrap_modes.vertical_prefix_from_module_import, auto_allow_exceptions_=(ValueError,) ) + auto_pytest_magic(wrap_modes.vertical_hanging_indent_bracket, auto_allow_exceptions_=(ValueError,)) + + def test_wrap_mode_interface(): + assert ( + wrap_modes._wrap_mode_interface("statement", [], "", "", 80, [], "", "", True, True) == "" + ) +
Increase wrap mode test coverage
## Code Before: from hypothesis_auto import auto_pytest_magic from isort import wrap_modes auto_pytest_magic(wrap_modes.grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped_no_comma, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.noqa, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic( wrap_modes.vertical_prefix_from_module_import, auto_allow_exceptions_=(ValueError,) ) ## Instruction: Increase wrap mode test coverage ## Code After: from hypothesis_auto import auto_pytest_magic from isort import wrap_modes auto_pytest_magic(wrap_modes.grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped_no_comma, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.noqa, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic( wrap_modes.vertical_prefix_from_module_import, auto_allow_exceptions_=(ValueError,) ) auto_pytest_magic(wrap_modes.vertical_hanging_indent_bracket, auto_allow_exceptions_=(ValueError,)) def test_wrap_mode_interface(): assert ( wrap_modes._wrap_mode_interface("statement", [], "", "", 80, [], "", "", True, True) == "" )
from hypothesis_auto import auto_pytest_magic from isort import wrap_modes auto_pytest_magic(wrap_modes.grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_hanging_indent, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.vertical_grid_grouped_no_comma, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic(wrap_modes.noqa, auto_allow_exceptions_=(ValueError,)) auto_pytest_magic( wrap_modes.vertical_prefix_from_module_import, auto_allow_exceptions_=(ValueError,) ) + auto_pytest_magic(wrap_modes.vertical_hanging_indent_bracket, auto_allow_exceptions_=(ValueError,)) + + + def test_wrap_mode_interface(): + assert ( + wrap_modes._wrap_mode_interface("statement", [], "", "", 80, [], "", "", True, True) == "" + )
42c7b4c7b74a3aeccca73f368a16a2f96295ff3b
radar/radar/models/user_sessions.py
radar/radar/models/user_sessions.py
from sqlalchemy import String, Column, Integer, ForeignKey, DateTime, Index from sqlalchemy.dialects import postgresql from sqlalchemy.orm import relationship from radar.database import db from radar.models.users import User, AnonymousUser from radar.models.logs import log_changes @log_changes class UserSession(db.Model): __tablename__ = 'user_sessions' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id'), nullable=False) user = relationship(User) date = Column(DateTime(timezone=True), nullable=False) ip_address = Column(postgresql.INET, nullable=False) user_agent = Column(String, nullable=True) @classmethod def is_authenticated(cls): return True Index('user_sessions_user_idx', UserSession.user_id) class AnonymousSession(object): user = AnonymousUser() @classmethod def is_authenticated(cls): return False
from sqlalchemy import String, Column, Integer, ForeignKey, DateTime, Index from sqlalchemy.dialects import postgresql from sqlalchemy.orm import relationship, backref from radar.database import db from radar.models.users import User, AnonymousUser from radar.models.logs import log_changes @log_changes class UserSession(db.Model): __tablename__ = 'user_sessions' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False) user = relationship('User', backref=backref('user_sessions', cascade='all, delete-orphan', passive_deletes=True)) date = Column(DateTime(timezone=True), nullable=False) ip_address = Column(postgresql.INET, nullable=False) user_agent = Column(String, nullable=True) @classmethod def is_authenticated(cls): return True Index('user_sessions_user_idx', UserSession.user_id) class AnonymousSession(object): user = AnonymousUser() @classmethod def is_authenticated(cls): return False
Delete user sessions with user
Delete user sessions with user
Python
agpl-3.0
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
from sqlalchemy import String, Column, Integer, ForeignKey, DateTime, Index from sqlalchemy.dialects import postgresql - from sqlalchemy.orm import relationship + from sqlalchemy.orm import relationship, backref from radar.database import db from radar.models.users import User, AnonymousUser from radar.models.logs import log_changes @log_changes class UserSession(db.Model): __tablename__ = 'user_sessions' id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey('users.id'), nullable=False) + user_id = Column(Integer, ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False) - user = relationship(User) + user = relationship('User', backref=backref('user_sessions', cascade='all, delete-orphan', passive_deletes=True)) date = Column(DateTime(timezone=True), nullable=False) ip_address = Column(postgresql.INET, nullable=False) user_agent = Column(String, nullable=True) @classmethod def is_authenticated(cls): return True Index('user_sessions_user_idx', UserSession.user_id) class AnonymousSession(object): user = AnonymousUser() @classmethod def is_authenticated(cls): return False
Delete user sessions with user
## Code Before: from sqlalchemy import String, Column, Integer, ForeignKey, DateTime, Index from sqlalchemy.dialects import postgresql from sqlalchemy.orm import relationship from radar.database import db from radar.models.users import User, AnonymousUser from radar.models.logs import log_changes @log_changes class UserSession(db.Model): __tablename__ = 'user_sessions' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id'), nullable=False) user = relationship(User) date = Column(DateTime(timezone=True), nullable=False) ip_address = Column(postgresql.INET, nullable=False) user_agent = Column(String, nullable=True) @classmethod def is_authenticated(cls): return True Index('user_sessions_user_idx', UserSession.user_id) class AnonymousSession(object): user = AnonymousUser() @classmethod def is_authenticated(cls): return False ## Instruction: Delete user sessions with user ## Code After: from sqlalchemy import String, Column, Integer, ForeignKey, DateTime, Index from sqlalchemy.dialects import postgresql from sqlalchemy.orm import relationship, backref from radar.database import db from radar.models.users import User, AnonymousUser from radar.models.logs import log_changes @log_changes class UserSession(db.Model): __tablename__ = 'user_sessions' id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False) user = relationship('User', backref=backref('user_sessions', cascade='all, delete-orphan', passive_deletes=True)) date = Column(DateTime(timezone=True), nullable=False) ip_address = Column(postgresql.INET, nullable=False) user_agent = Column(String, nullable=True) @classmethod def is_authenticated(cls): return True Index('user_sessions_user_idx', UserSession.user_id) class AnonymousSession(object): user = AnonymousUser() @classmethod def is_authenticated(cls): return False
from sqlalchemy import String, Column, Integer, ForeignKey, DateTime, Index from sqlalchemy.dialects import postgresql - from sqlalchemy.orm import relationship + from sqlalchemy.orm import relationship, backref ? +++++++++ from radar.database import db from radar.models.users import User, AnonymousUser from radar.models.logs import log_changes @log_changes class UserSession(db.Model): __tablename__ = 'user_sessions' id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey('users.id'), nullable=False) + user_id = Column(Integer, ForeignKey('users.id', onupdate='CASCADE', ondelete='CASCADE'), nullable=False) ? ++++++++++++++++++++++++++++++++++++++++ - user = relationship(User) + user = relationship('User', backref=backref('user_sessions', cascade='all, delete-orphan', passive_deletes=True)) date = Column(DateTime(timezone=True), nullable=False) ip_address = Column(postgresql.INET, nullable=False) user_agent = Column(String, nullable=True) @classmethod def is_authenticated(cls): return True Index('user_sessions_user_idx', UserSession.user_id) class AnonymousSession(object): user = AnonymousUser() @classmethod def is_authenticated(cls): return False
42a1aaba8daa253b99f444a512f8231db47dfbb2
helpers.py
helpers.py
import array import numpy as np def load_glove_vectors(filename, vocab=None): """ Load glove vectors from a .txt file. Optionally limit the vocabulary to save memory. `vocab` should be a set. """ dct = {} vectors = array.array('d') current_idx = 0 with open(filename, "r", encoding="utf-8") as f: for _, line in enumerate(f): tokens = line.split(" ") word = tokens[0] entries = tokens[1:] if not vocab or word in vocab: dct[word] = current_idx vectors.extend(float(x) for x in entries) current_idx += 1 word_dim = len(entries) num_vectors = len(dct) return [np.array(vectors).reshape(num_vectors, word_dim), dct] def evaluate_recall(y, y_labels, n=1): num_examples = float(len(y)) num_correct = 0 for predictions, label in zip(y, y_labels): if label in predictions[:n]: num_correct += 1 return num_correct/num_examples
import array
import numpy as np
import pandas as pd


def load_glove_vectors(filename, vocab=None):
    """
    Load glove vectors from a .txt file.
    Optionally limit the vocabulary to save memory. `vocab` should be a set.
    """
    dct = {}
    vectors = array.array('d')
    current_idx = 0
    with open(filename, "r", encoding="utf-8") as f:
        for _, line in enumerate(f):
            tokens = line.split(" ")
            word = tokens[0]
            entries = tokens[1:]
            if not vocab or word in vocab:
                dct[word] = current_idx
                vectors.extend(float(x) for x in entries)
                current_idx += 1
        word_dim = len(entries)
    num_vectors = len(dct)
    return [np.array(vectors).reshape(num_vectors, word_dim), dct]


def evaluate_recall(y, y_labels, n=1):
    num_examples = float(len(y))
    num_correct = 0
    for predictions, label in zip(y, y_labels):
        if label in predictions[:n]:
            num_correct += 1
    return num_correct/num_examples


def convert_to_labeled_df(df):
    """
    Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels.
    This Data Frame can be used to easily get accuracy values for cross-validation
    """
    result = []
    for idx, row in df.iterrows():
        context = row.Context
        result.append([context, row.iloc[1], 1])
        for distractor in row.iloc[2:]:
            result.append([context, distractor, 0])
    return pd.DataFrame(result, columns=["Context", "Utterance", "Label"])
Add dataset conversion helper function
Add dataset conversion helper function
Python
mit
AotY/chatbot-retrieval,LepiorzDaniel/test2
import array
import numpy as np
+ import pandas as pd


def load_glove_vectors(filename, vocab=None):
    """
    Load glove vectors from a .txt file.
    Optionally limit the vocabulary to save memory. `vocab` should be a set.
    """
    dct = {}
    vectors = array.array('d')
    current_idx = 0
    with open(filename, "r", encoding="utf-8") as f:
        for _, line in enumerate(f):
            tokens = line.split(" ")
            word = tokens[0]
            entries = tokens[1:]
            if not vocab or word in vocab:
                dct[word] = current_idx
                vectors.extend(float(x) for x in entries)
                current_idx += 1
        word_dim = len(entries)
    num_vectors = len(dct)
    return [np.array(vectors).reshape(num_vectors, word_dim), dct]


def evaluate_recall(y, y_labels, n=1):
    num_examples = float(len(y))
    num_correct = 0
    for predictions, label in zip(y, y_labels):
        if label in predictions[:n]:
            num_correct += 1
    return num_correct/num_examples
+
+ def convert_to_labeled_df(df):
+     """
+     Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels.
+     This Data Frame can be used to easily get accuracy values for cross-validation
+     """
+     result = []
+     for idx, row in df.iterrows():
+         context = row.Context
+         result.append([context, row.iloc[1], 1])
+         for distractor in row.iloc[2:]:
+             result.append([context, distractor, 0])
+     return pd.DataFrame(result, columns=["Context", "Utterance", "Label"])
+
Add dataset conversion helper function
## Code Before: import array
import numpy as np


def load_glove_vectors(filename, vocab=None):
    """
    Load glove vectors from a .txt file.
    Optionally limit the vocabulary to save memory. `vocab` should be a set.
    """
    dct = {}
    vectors = array.array('d')
    current_idx = 0
    with open(filename, "r", encoding="utf-8") as f:
        for _, line in enumerate(f):
            tokens = line.split(" ")
            word = tokens[0]
            entries = tokens[1:]
            if not vocab or word in vocab:
                dct[word] = current_idx
                vectors.extend(float(x) for x in entries)
                current_idx += 1
        word_dim = len(entries)
    num_vectors = len(dct)
    return [np.array(vectors).reshape(num_vectors, word_dim), dct]


def evaluate_recall(y, y_labels, n=1):
    num_examples = float(len(y))
    num_correct = 0
    for predictions, label in zip(y, y_labels):
        if label in predictions[:n]:
            num_correct += 1
    return num_correct/num_examples

## Instruction: Add dataset conversion helper function
## Code After: import array
import numpy as np
import pandas as pd


def load_glove_vectors(filename, vocab=None):
    """
    Load glove vectors from a .txt file.
    Optionally limit the vocabulary to save memory. `vocab` should be a set.
    """
    dct = {}
    vectors = array.array('d')
    current_idx = 0
    with open(filename, "r", encoding="utf-8") as f:
        for _, line in enumerate(f):
            tokens = line.split(" ")
            word = tokens[0]
            entries = tokens[1:]
            if not vocab or word in vocab:
                dct[word] = current_idx
                vectors.extend(float(x) for x in entries)
                current_idx += 1
        word_dim = len(entries)
    num_vectors = len(dct)
    return [np.array(vectors).reshape(num_vectors, word_dim), dct]


def evaluate_recall(y, y_labels, n=1):
    num_examples = float(len(y))
    num_correct = 0
    for predictions, label in zip(y, y_labels):
        if label in predictions[:n]:
            num_correct += 1
    return num_correct/num_examples


def convert_to_labeled_df(df):
    """
    Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels.
    This Data Frame can be used to easily get accuracy values for cross-validation
    """
    result = []
    for idx, row in df.iterrows():
        context = row.Context
        result.append([context, row.iloc[1], 1])
        for distractor in row.iloc[2:]:
            result.append([context, distractor, 0])
    return pd.DataFrame(result, columns=["Context", "Utterance", "Label"])
import array
import numpy as np
+ import pandas as pd


def load_glove_vectors(filename, vocab=None):
    """
    Load glove vectors from a .txt file.
    Optionally limit the vocabulary to save memory. `vocab` should be a set.
    """
    dct = {}
    vectors = array.array('d')
    current_idx = 0
    with open(filename, "r", encoding="utf-8") as f:
        for _, line in enumerate(f):
            tokens = line.split(" ")
            word = tokens[0]
            entries = tokens[1:]
            if not vocab or word in vocab:
                dct[word] = current_idx
                vectors.extend(float(x) for x in entries)
                current_idx += 1
        word_dim = len(entries)
    num_vectors = len(dct)
    return [np.array(vectors).reshape(num_vectors, word_dim), dct]


def evaluate_recall(y, y_labels, n=1):
    num_examples = float(len(y))
    num_correct = 0
    for predictions, label in zip(y, y_labels):
        if label in predictions[:n]:
            num_correct += 1
    return num_correct/num_examples
+
+
+ def convert_to_labeled_df(df):
+     """
+     Converts the test/validation data from the Ubuntu Dialog corpus into a train-like Data Frame with labels.
+     This Data Frame can be used to easily get accuracy values for cross-validation
+     """
+     result = []
+     for idx, row in df.iterrows():
+         context = row.Context
+         result.append([context, row.iloc[1], 1])
+         for distractor in row.iloc[2:]:
+             result.append([context, distractor, 0])
+     return pd.DataFrame(result, columns=["Context", "Utterance", "Label"])
af3525bf174d0774b61464f9cc8ab8441babc7ae
examples/flask_alchemy/test_demoapp.py
examples/flask_alchemy/test_demoapp.py
import os import unittest import tempfile import demoapp import demoapp_factories class DemoAppTestCase(unittest.TestCase): def setUp(self): demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' demoapp.app.config['TESTING'] = True self.app = demoapp.app.test_client() self.db = demoapp.db self.db.create_all() def tearDown(self): self.db.drop_all() def test_user_factory(self): user = demoapp_factories.UserFactory() self.db.session.commit() self.assertIsNotNone(user.id) self.assertEqual(1, len(demoapp.User.query.all())) def test_userlog_factory(self): userlog = demoapp_factories.UserLogFactory() self.db.session.commit() self.assertIsNotNone(userlog.id) self.assertIsNotNone(userlog.user.id) self.assertEqual(1, len(demoapp.User.query.all())) self.assertEqual(1, len(demoapp.UserLog.query.all()))
import unittest import demoapp import demoapp_factories class DemoAppTestCase(unittest.TestCase): def setUp(self): demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' demoapp.app.config['TESTING'] = True self.app = demoapp.app.test_client() self.db = demoapp.db self.db.create_all() def tearDown(self): self.db.drop_all() def test_user_factory(self): user = demoapp_factories.UserFactory() self.db.session.commit() self.assertIsNotNone(user.id) self.assertEqual(1, len(demoapp.User.query.all())) def test_userlog_factory(self): userlog = demoapp_factories.UserLogFactory() self.db.session.commit() self.assertIsNotNone(userlog.id) self.assertIsNotNone(userlog.user.id) self.assertEqual(1, len(demoapp.User.query.all())) self.assertEqual(1, len(demoapp.UserLog.query.all()))
Remove useless imports from flask alchemy demo
Remove useless imports from flask alchemy demo
Python
mit
FactoryBoy/factory_boy
- import os import unittest - import tempfile import demoapp import demoapp_factories + class DemoAppTestCase(unittest.TestCase): def setUp(self): demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' demoapp.app.config['TESTING'] = True self.app = demoapp.app.test_client() self.db = demoapp.db self.db.create_all() def tearDown(self): self.db.drop_all() def test_user_factory(self): user = demoapp_factories.UserFactory() self.db.session.commit() self.assertIsNotNone(user.id) self.assertEqual(1, len(demoapp.User.query.all())) def test_userlog_factory(self): userlog = demoapp_factories.UserLogFactory() self.db.session.commit() self.assertIsNotNone(userlog.id) self.assertIsNotNone(userlog.user.id) self.assertEqual(1, len(demoapp.User.query.all())) self.assertEqual(1, len(demoapp.UserLog.query.all()))
Remove useless imports from flask alchemy demo
## Code Before: import os import unittest import tempfile import demoapp import demoapp_factories class DemoAppTestCase(unittest.TestCase): def setUp(self): demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' demoapp.app.config['TESTING'] = True self.app = demoapp.app.test_client() self.db = demoapp.db self.db.create_all() def tearDown(self): self.db.drop_all() def test_user_factory(self): user = demoapp_factories.UserFactory() self.db.session.commit() self.assertIsNotNone(user.id) self.assertEqual(1, len(demoapp.User.query.all())) def test_userlog_factory(self): userlog = demoapp_factories.UserLogFactory() self.db.session.commit() self.assertIsNotNone(userlog.id) self.assertIsNotNone(userlog.user.id) self.assertEqual(1, len(demoapp.User.query.all())) self.assertEqual(1, len(demoapp.UserLog.query.all())) ## Instruction: Remove useless imports from flask alchemy demo ## Code After: import unittest import demoapp import demoapp_factories class DemoAppTestCase(unittest.TestCase): def setUp(self): demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' demoapp.app.config['TESTING'] = True self.app = demoapp.app.test_client() self.db = demoapp.db self.db.create_all() def tearDown(self): self.db.drop_all() def test_user_factory(self): user = demoapp_factories.UserFactory() self.db.session.commit() self.assertIsNotNone(user.id) self.assertEqual(1, len(demoapp.User.query.all())) def test_userlog_factory(self): userlog = demoapp_factories.UserLogFactory() self.db.session.commit() self.assertIsNotNone(userlog.id) self.assertIsNotNone(userlog.user.id) self.assertEqual(1, len(demoapp.User.query.all())) self.assertEqual(1, len(demoapp.UserLog.query.all()))
- import os import unittest - import tempfile import demoapp import demoapp_factories + class DemoAppTestCase(unittest.TestCase): def setUp(self): demoapp.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://' demoapp.app.config['TESTING'] = True self.app = demoapp.app.test_client() self.db = demoapp.db self.db.create_all() def tearDown(self): self.db.drop_all() def test_user_factory(self): user = demoapp_factories.UserFactory() self.db.session.commit() self.assertIsNotNone(user.id) self.assertEqual(1, len(demoapp.User.query.all())) def test_userlog_factory(self): userlog = demoapp_factories.UserLogFactory() self.db.session.commit() self.assertIsNotNone(userlog.id) self.assertIsNotNone(userlog.user.id) self.assertEqual(1, len(demoapp.User.query.all())) self.assertEqual(1, len(demoapp.UserLog.query.all()))
e861e74374d22d3684dccfa5e0063ff37549bcfc
api/app.py
api/app.py
from flask import Flask from flask import request from flask import jsonify from y_text_recommender_system.recommender import recommend app = Flask(__name__) class InvalidUsage(Exception): status_code = 400 def __init__(self, message, payload=None): Exception.__init__(self) self.message = message self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): response = jsonify(error.to_dict()) response.status_code = error.status_code return response @app.route('/recommender/', methods=['POST']) def recommender(): content = request.get_json() if content is not None: doc = content.get('doc', {}) docs = content.get('docs', []) _verify_parameters(doc, docs) result = recommend(doc, docs) return jsonify(result) else: msg = 'You need to send the parameters: doc and docs' raise InvalidUsage(msg) def _verify_parameters(doc, docs): if doc == {}: msg = 'The parameter `doc` is missing or empty' raise InvalidUsage(msg) if not isinstance(doc, dict): msg = 'The parameter `doc` should be a dict' raise InvalidUsage(msg) if len(docs) == 0: msg = 'The parameter `docs` is missing or empty' raise InvalidUsage(msg)
from flask import Flask from flask import request from flask import jsonify from y_text_recommender_system.recommender import recommend app = Flask(__name__) class InvalidUsage(Exception): status_code = 400 def __init__(self, message, payload=None): Exception.__init__(self) self.message = message self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): response = jsonify(error.to_dict()) response.status_code = error.status_code return response @app.route('/recommender/', methods=['POST']) def recommender(): content = request.get_json() if content is not None: doc = content.get('doc', {}) docs = content.get('docs', []) _verify_parameters(doc, docs) result = recommend(doc, docs) return jsonify(result) else: msg = 'You need to send the parameters: doc and docs' raise InvalidUsage(msg) def _verify_parameters(doc, docs): if bool(doc) is False: msg = 'The parameter `doc` is missing or empty' raise InvalidUsage(msg) if not isinstance(doc, dict): msg = 'The parameter `doc` should be a dict' raise InvalidUsage(msg) if len(docs) == 0: msg = 'The parameter `docs` is missing or empty' raise InvalidUsage(msg)
Refactor to change the comparator of dict
Refactor to change the comparator of dict
Python
mit
joaojunior/y_text_recommender_system
from flask import Flask from flask import request from flask import jsonify from y_text_recommender_system.recommender import recommend app = Flask(__name__) class InvalidUsage(Exception): status_code = 400 def __init__(self, message, payload=None): Exception.__init__(self) self.message = message self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): response = jsonify(error.to_dict()) response.status_code = error.status_code return response @app.route('/recommender/', methods=['POST']) def recommender(): content = request.get_json() if content is not None: doc = content.get('doc', {}) docs = content.get('docs', []) _verify_parameters(doc, docs) result = recommend(doc, docs) return jsonify(result) else: msg = 'You need to send the parameters: doc and docs' raise InvalidUsage(msg) def _verify_parameters(doc, docs): - if doc == {}: + if bool(doc) is False: msg = 'The parameter `doc` is missing or empty' raise InvalidUsage(msg) if not isinstance(doc, dict): msg = 'The parameter `doc` should be a dict' raise InvalidUsage(msg) if len(docs) == 0: msg = 'The parameter `docs` is missing or empty' raise InvalidUsage(msg)
Refactor to change the comparator of dict
## Code Before: from flask import Flask from flask import request from flask import jsonify from y_text_recommender_system.recommender import recommend app = Flask(__name__) class InvalidUsage(Exception): status_code = 400 def __init__(self, message, payload=None): Exception.__init__(self) self.message = message self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): response = jsonify(error.to_dict()) response.status_code = error.status_code return response @app.route('/recommender/', methods=['POST']) def recommender(): content = request.get_json() if content is not None: doc = content.get('doc', {}) docs = content.get('docs', []) _verify_parameters(doc, docs) result = recommend(doc, docs) return jsonify(result) else: msg = 'You need to send the parameters: doc and docs' raise InvalidUsage(msg) def _verify_parameters(doc, docs): if doc == {}: msg = 'The parameter `doc` is missing or empty' raise InvalidUsage(msg) if not isinstance(doc, dict): msg = 'The parameter `doc` should be a dict' raise InvalidUsage(msg) if len(docs) == 0: msg = 'The parameter `docs` is missing or empty' raise InvalidUsage(msg) ## Instruction: Refactor to change the comparator of dict ## Code After: from flask import Flask from flask import request from flask import jsonify from y_text_recommender_system.recommender import recommend app = Flask(__name__) class InvalidUsage(Exception): status_code = 400 def __init__(self, message, payload=None): Exception.__init__(self) self.message = message self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): response = jsonify(error.to_dict()) response.status_code = error.status_code return response @app.route('/recommender/', methods=['POST']) def recommender(): content = request.get_json() if content is not None: doc = content.get('doc', {}) docs = content.get('docs', []) _verify_parameters(doc, docs) result = recommend(doc, docs) return jsonify(result) else: msg = 'You need to send the parameters: doc and docs' raise InvalidUsage(msg) def _verify_parameters(doc, docs): if bool(doc) is False: msg = 'The parameter `doc` is missing or empty' raise InvalidUsage(msg) if not isinstance(doc, dict): msg = 'The parameter `doc` should be a dict' raise InvalidUsage(msg) if len(docs) == 0: msg = 'The parameter `docs` is missing or empty' raise InvalidUsage(msg)
from flask import Flask from flask import request from flask import jsonify from y_text_recommender_system.recommender import recommend app = Flask(__name__) class InvalidUsage(Exception): status_code = 400 def __init__(self, message, payload=None): Exception.__init__(self) self.message = message self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv @app.errorhandler(InvalidUsage) def handle_invalid_usage(error): response = jsonify(error.to_dict()) response.status_code = error.status_code return response @app.route('/recommender/', methods=['POST']) def recommender(): content = request.get_json() if content is not None: doc = content.get('doc', {}) docs = content.get('docs', []) _verify_parameters(doc, docs) result = recommend(doc, docs) return jsonify(result) else: msg = 'You need to send the parameters: doc and docs' raise InvalidUsage(msg) def _verify_parameters(doc, docs): - if doc == {}: + if bool(doc) is False: msg = 'The parameter `doc` is missing or empty' raise InvalidUsage(msg) if not isinstance(doc, dict): msg = 'The parameter `doc` should be a dict' raise InvalidUsage(msg) if len(docs) == 0: msg = 'The parameter `docs` is missing or empty' raise InvalidUsage(msg)
c3d03629734abfead5ae1eae83d1b6dcec792b45
iconizer/django_in_iconizer/django_server.py
iconizer/django_in_iconizer/django_server.py
import os class DjangoServer(object): default_django_manage_script = "manage.py" def __init__(self, django_manage_script=None): super(DjangoServer, self).__init__() if django_manage_script is None: self.django_manage_script = self.default_django_manage_script else: self.django_manage_script = django_manage_script def get_task_descriptor(self, task_name, param_list=[]): task_name_and_param = [self.django_manage_script, task_name] task_name_and_param.extend(param_list) return {task_name: task_name_and_param} # noinspection PyMethodMayBeStatic def get_cmd_str(self, cmd_name, param_list=[]): return "python %s %s" % (self.django_manage_script, cmd_name) def execute_cmd(self, django_cmd): os.system(self.get_cmd_str(django_cmd))
import os class DjangoServer(object): default_django_manage_script = "manage.py" def __init__(self, django_manage_script=None): super(DjangoServer, self).__init__() if django_manage_script is None: self.django_manage_script = self.default_django_manage_script else: self.django_manage_script = django_manage_script self.django_manage_script = os.environ.get("MANAGE_PY", self.django_manage_script) def get_task_descriptor(self, task_name, param_list=[]): task_name_and_param = [self.django_manage_script, task_name] task_name_and_param.extend(param_list) return {task_name: task_name_and_param} # noinspection PyMethodMayBeStatic def get_cmd_str(self, cmd_name, param_list=[]): return "python %s %s" % (self.django_manage_script, cmd_name) def execute_cmd(self, django_cmd): os.system(self.get_cmd_str(django_cmd))
Enable specifying a command line processor.
Enable specifying a command line processor.
Python
bsd-3-clause
weijia/iconizer
import os class DjangoServer(object): default_django_manage_script = "manage.py" def __init__(self, django_manage_script=None): super(DjangoServer, self).__init__() if django_manage_script is None: self.django_manage_script = self.default_django_manage_script else: self.django_manage_script = django_manage_script + self.django_manage_script = os.environ.get("MANAGE_PY", self.django_manage_script) def get_task_descriptor(self, task_name, param_list=[]): task_name_and_param = [self.django_manage_script, task_name] task_name_and_param.extend(param_list) return {task_name: task_name_and_param} # noinspection PyMethodMayBeStatic def get_cmd_str(self, cmd_name, param_list=[]): return "python %s %s" % (self.django_manage_script, cmd_name) def execute_cmd(self, django_cmd): os.system(self.get_cmd_str(django_cmd))
Enable specifying a command line processor.
## Code Before: import os


class DjangoServer(object):
    default_django_manage_script = "manage.py"

    def __init__(self, django_manage_script=None):
        super(DjangoServer, self).__init__()
        if django_manage_script is None:
            self.django_manage_script = self.default_django_manage_script
        else:
            self.django_manage_script = django_manage_script

    def get_task_descriptor(self, task_name, param_list=[]):
        task_name_and_param = [self.django_manage_script, task_name]
        task_name_and_param.extend(param_list)
        return {task_name: task_name_and_param}

    # noinspection PyMethodMayBeStatic
    def get_cmd_str(self, cmd_name, param_list=[]):
        return "python %s %s" % (self.django_manage_script, cmd_name)

    def execute_cmd(self, django_cmd):
        os.system(self.get_cmd_str(django_cmd))

## Instruction: Enable specifying a command line processor.
## Code After: import os


class DjangoServer(object):
    default_django_manage_script = "manage.py"

    def __init__(self, django_manage_script=None):
        super(DjangoServer, self).__init__()
        if django_manage_script is None:
            self.django_manage_script = self.default_django_manage_script
        else:
            self.django_manage_script = django_manage_script
        self.django_manage_script = os.environ.get("MANAGE_PY", self.django_manage_script)

    def get_task_descriptor(self, task_name, param_list=[]):
        task_name_and_param = [self.django_manage_script, task_name]
        task_name_and_param.extend(param_list)
        return {task_name: task_name_and_param}

    # noinspection PyMethodMayBeStatic
    def get_cmd_str(self, cmd_name, param_list=[]):
        return "python %s %s" % (self.django_manage_script, cmd_name)

    def execute_cmd(self, django_cmd):
        os.system(self.get_cmd_str(django_cmd))
import os class DjangoServer(object): default_django_manage_script = "manage.py" def __init__(self, django_manage_script=None): super(DjangoServer, self).__init__() if django_manage_script is None: self.django_manage_script = self.default_django_manage_script else: self.django_manage_script = django_manage_script + self.django_manage_script = os.environ.get("MANAGE_PY", self.django_manage_script) def get_task_descriptor(self, task_name, param_list=[]): task_name_and_param = [self.django_manage_script, task_name] task_name_and_param.extend(param_list) return {task_name: task_name_and_param} # noinspection PyMethodMayBeStatic def get_cmd_str(self, cmd_name, param_list=[]): return "python %s %s" % (self.django_manage_script, cmd_name) def execute_cmd(self, django_cmd): os.system(self.get_cmd_str(django_cmd))
094c3e428644631c78837def24ac65ba4c84b1c7
pythainlp/tokenize/__init__.py
pythainlp/tokenize/__init__.py
from pythainlp.corpus import thai_syllables, thai_words from pythainlp.util.trie import Trie DEFAULT_WORD_TOKENIZE_ENGINE = "newmm" DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut" DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc" DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict" DEFAULT_WORD_DICT_TRIE = Trie(thai_words()) DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables()) DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE from pythainlp.tokenize.tokenize import ( Tokenizer, sent_tokenize, subword_tokenize, syllable_tokenize, word_tokenize, ) __all__ = [ "Tokenizer", "sent_tokenize", "subword_tokenize", "syllable_tokenize", "word_tokenize", ]
from pythainlp.corpus import thai_syllables, thai_words from pythainlp.util.trie import Trie DEFAULT_WORD_TOKENIZE_ENGINE = "newmm" DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut" DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc" DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict" DEFAULT_WORD_DICT_TRIE = Trie(thai_words()) DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables()) DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE from pythainlp.tokenize.tokenize import ( Tokenizer, sent_tokenize, subword_tokenize, syllable_tokenize, word_tokenize, ) __all__ = [ "Tokenizer", "Trie", "sent_tokenize", "subword_tokenize", "syllable_tokenize", "word_tokenize", ]
Make pythainlp.util.Trie still accessible through pythainlp.tokenize.Trie (will deprecate later)
Make pythainlp.util.Trie still accessible through pythainlp.tokenize.Trie (will deprecate later)
Python
apache-2.0
PyThaiNLP/pythainlp
from pythainlp.corpus import thai_syllables, thai_words from pythainlp.util.trie import Trie DEFAULT_WORD_TOKENIZE_ENGINE = "newmm" DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut" DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc" DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict" DEFAULT_WORD_DICT_TRIE = Trie(thai_words()) DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables()) DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE from pythainlp.tokenize.tokenize import ( Tokenizer, sent_tokenize, subword_tokenize, syllable_tokenize, word_tokenize, ) __all__ = [ "Tokenizer", + "Trie", "sent_tokenize", "subword_tokenize", "syllable_tokenize", "word_tokenize", ]
Make pythainlp.util.Trie still accessible through pythainlp.tokenize.Trie (will deprecate later)
## Code Before: from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie

DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"

DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE

from pythainlp.tokenize.tokenize import (
    Tokenizer,
    sent_tokenize,
    subword_tokenize,
    syllable_tokenize,
    word_tokenize,
)

__all__ = [
    "Tokenizer",
    "sent_tokenize",
    "subword_tokenize",
    "syllable_tokenize",
    "word_tokenize",
]

## Instruction: Make pythainlp.util.Trie still accessible through pythainlp.tokenize.Trie (will deprecate later)
## Code After: from pythainlp.corpus import thai_syllables, thai_words
from pythainlp.util.trie import Trie

DEFAULT_WORD_TOKENIZE_ENGINE = "newmm"
DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut"
DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc"
DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict"

DEFAULT_WORD_DICT_TRIE = Trie(thai_words())
DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables())
DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE

from pythainlp.tokenize.tokenize import (
    Tokenizer,
    sent_tokenize,
    subword_tokenize,
    syllable_tokenize,
    word_tokenize,
)

__all__ = [
    "Tokenizer",
    "Trie",
    "sent_tokenize",
    "subword_tokenize",
    "syllable_tokenize",
    "word_tokenize",
]
from pythainlp.corpus import thai_syllables, thai_words from pythainlp.util.trie import Trie DEFAULT_WORD_TOKENIZE_ENGINE = "newmm" DEFAULT_SENT_TOKENIZE_ENGINE = "crfcut" DEFAULT_SUBWORD_TOKENIZE_ENGINE = "tcc" DEFAULT_SYLLABLE_TOKENIZE_ENGINE = "dict" DEFAULT_WORD_DICT_TRIE = Trie(thai_words()) DEFAULT_SYLLABLE_DICT_TRIE = Trie(thai_syllables()) DEFAULT_DICT_TRIE = DEFAULT_WORD_DICT_TRIE from pythainlp.tokenize.tokenize import ( Tokenizer, sent_tokenize, subword_tokenize, syllable_tokenize, word_tokenize, ) __all__ = [ "Tokenizer", + "Trie", "sent_tokenize", "subword_tokenize", "syllable_tokenize", "word_tokenize", ]
268c4458161ce754a82e3986787f6703f9122e3e
trackmybmi/users/factories.py
trackmybmi/users/factories.py
import factory from django.contrib.auth.hashers import make_password from .models import Friendship, User class UserFactory(factory.django.DjangoModelFactory): """Create users with default attributes.""" class Meta: model = User email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n)) password = make_password('password') class FriendshipFactory(factory.django.DjangoModelFactory): class Meta: model = Friendship initiator = factory.SubFactory(UserFactory) recipient = factory.SubFactory(UserFactory)
import factory from django.contrib.auth import get_user_model from django.contrib.auth.hashers import make_password from .models import Friendship User = get_user_model() class UserFactory(factory.django.DjangoModelFactory): """Create users with default attributes.""" class Meta: model = User email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n)) password = make_password('password') class FriendshipFactory(factory.django.DjangoModelFactory): class Meta: model = Friendship initiator = factory.SubFactory(UserFactory) recipient = factory.SubFactory(UserFactory)
Replace User import with call to get_user_model()
Replace User import with call to get_user_model()
Python
mit
ojh/trackmybmi
import factory + from django.contrib.auth import get_user_model from django.contrib.auth.hashers import make_password - from .models import Friendship, User + from .models import Friendship + + + User = get_user_model() class UserFactory(factory.django.DjangoModelFactory): """Create users with default attributes.""" class Meta: model = User email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n)) password = make_password('password') class FriendshipFactory(factory.django.DjangoModelFactory): class Meta: model = Friendship initiator = factory.SubFactory(UserFactory) recipient = factory.SubFactory(UserFactory)
Replace User import with call to get_user_model()
## Code Before: import factory from django.contrib.auth.hashers import make_password from .models import Friendship, User class UserFactory(factory.django.DjangoModelFactory): """Create users with default attributes.""" class Meta: model = User email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n)) password = make_password('password') class FriendshipFactory(factory.django.DjangoModelFactory): class Meta: model = Friendship initiator = factory.SubFactory(UserFactory) recipient = factory.SubFactory(UserFactory) ## Instruction: Replace User import with call to get_user_model() ## Code After: import factory from django.contrib.auth import get_user_model from django.contrib.auth.hashers import make_password from .models import Friendship User = get_user_model() class UserFactory(factory.django.DjangoModelFactory): """Create users with default attributes.""" class Meta: model = User email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n)) password = make_password('password') class FriendshipFactory(factory.django.DjangoModelFactory): class Meta: model = Friendship initiator = factory.SubFactory(UserFactory) recipient = factory.SubFactory(UserFactory)
import factory + from django.contrib.auth import get_user_model from django.contrib.auth.hashers import make_password - from .models import Friendship, User ? ------ + from .models import Friendship + + + User = get_user_model() class UserFactory(factory.django.DjangoModelFactory): """Create users with default attributes.""" class Meta: model = User email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n)) password = make_password('password') class FriendshipFactory(factory.django.DjangoModelFactory): class Meta: model = Friendship initiator = factory.SubFactory(UserFactory) recipient = factory.SubFactory(UserFactory)
739018911befdb6804f26bc1a99dba6faa1313b7
mezzanine/core/auth_backends.py
mezzanine/core/auth_backends.py
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.db.models import Q from django.utils.http import base36_to_int class MezzanineBackend(ModelBackend): """ Extends Django's ``ModelBackend`` to allow login via username, email, or verification token. Args are either ``username`` and ``password``, or ``uidb36`` and ``token``. In either case, ``is_active`` can also be given. For login, is_active is not given, so that the login form can raise a specific error for inactive users. For password reset, True is given for is_active. For signup verficiation, False is given for is_active. """ def authenticate(self, **kwargs): if kwargs: username = kwargs.pop("username", None) if username: username_or_email = Q(username=username) | Q(email=username) password = kwargs.pop("password") try: user = User.objects.get(username_or_email, **kwargs) except User.DoesNotExist: pass else: if user.check_password(password): return user else: kwargs["id"] = base36_to_int(kwargs.pop("uidb36")) token = kwargs.pop("token") try: user = User.objects.get(**kwargs) except User.DoesNotExist: pass else: if default_token_generator.check_token(user, token): return user
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.db.models import Q from django.utils.http import base36_to_int class MezzanineBackend(ModelBackend): """ Extends Django's ``ModelBackend`` to allow login via username, email, or verification token. Args are either ``username`` and ``password``, or ``uidb36`` and ``token``. In either case, ``is_active`` can also be given. For login, is_active is not given, so that the login form can raise a specific error for inactive users. For password reset, True is given for is_active. For signup verficiation, False is given for is_active. """ def authenticate(self, **kwargs): if kwargs: username = kwargs.pop("username", None) if username: username_or_email = Q(username=username) | Q(email=username) password = kwargs.pop("password") try: user = User.objects.get(username_or_email, **kwargs) except User.DoesNotExist: pass else: if user.check_password(password): return user else: if 'uidb36' not in kwargs: return kwargs["id"] = base36_to_int(kwargs.pop("uidb36")) token = kwargs.pop("token") try: user = User.objects.get(**kwargs) except User.DoesNotExist: pass else: if default_token_generator.check_token(user, token): return user
Fix kwargs usage to work with other auth backends.
Fix kwargs usage to work with other auth backends.
Python
bsd-2-clause
ryneeverett/mezzanine,nikolas/mezzanine,tuxinhang1989/mezzanine,dustinrb/mezzanine,sjuxax/mezzanine,emile2016/mezzanine,orlenko/sfpirg,Kniyl/mezzanine,webounty/mezzanine,molokov/mezzanine,spookylukey/mezzanine,Skytorn86/mezzanine,christianwgd/mezzanine,jjz/mezzanine,agepoly/mezzanine,jjz/mezzanine,tuxinhang1989/mezzanine,PegasusWang/mezzanine,theclanks/mezzanine,adrian-the-git/mezzanine,SoLoHiC/mezzanine,adrian-the-git/mezzanine,agepoly/mezzanine,stephenmcd/mezzanine,SoLoHiC/mezzanine,Kniyl/mezzanine,dsanders11/mezzanine,theclanks/mezzanine,orlenko/plei,stbarnabas/mezzanine,damnfine/mezzanine,gbosh/mezzanine,dovydas/mezzanine,scarcry/snm-mezzanine,promil23/mezzanine,christianwgd/mezzanine,frankchin/mezzanine,industrydive/mezzanine,emile2016/mezzanine,mush42/mezzanine,Skytorn86/mezzanine,sjdines/mezzanine,joshcartme/mezzanine,batpad/mezzanine,saintbird/mezzanine,dovydas/mezzanine,saintbird/mezzanine,ZeroXn/mezzanine,promil23/mezzanine,biomassives/mezzanine,molokov/mezzanine,joshcartme/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,tuxinhang1989/mezzanine,stephenmcd/mezzanine,Kniyl/mezzanine,jjz/mezzanine,readevalprint/mezzanine,wyzex/mezzanine,nikolas/mezzanine,AlexHill/mezzanine,adrian-the-git/mezzanine,gradel/mezzanine,orlenko/plei,SoLoHiC/mezzanine,gradel/mezzanine,vladir/mezzanine,douglaskastle/mezzanine,orlenko/sfpirg,stbarnabas/mezzanine,frankier/mezzanine,fusionbox/mezzanine,sjdines/mezzanine,molokov/mezzanine,wrwrwr/mezzanine,promil23/mezzanine,Skytorn86/mezzanine,ryneeverett/mezzanine,Cajoline/mezzanine,mush42/mezzanine,jerivas/mezzanine,dustinrb/mezzanine,saintbird/mezzanine,viaregio/mezzanine,theclanks/mezzanine,sjdines/mezzanine,cccs-web/mezzanine,wbtuomela/mezzanine,industrydive/mezzanine,gradel/mezzanine,agepoly/mezzanine,vladir/mezzanine,frankier/mezzanine,viaregio/mezzanine,webounty/mezzanine,jerivas/mezzanine,douglaskastle/mezzanine,sjuxax/mezzanine,fusionbox/mezzanine,eino-makitalo/mezzanine,ZeroXn/mezzanine,industrydive/mezzanine,dovydas/mezzanine,dekomote/mezzanine-modeltranslation-backport,joshcartme/mezzanine,frankchin/mezzanine,orlenko/plei,scarcry/snm-mezzanine,PegasusWang/mezzanine,PegasusWang/mezzanine,Cajoline/mezzanine,ZeroXn/mezzanine,orlenko/sfpirg,geodesign/mezzanine,dekomote/mezzanine-modeltranslation-backport,dsanders11/mezzanine,spookylukey/mezzanine,dsanders11/mezzanine,eino-makitalo/mezzanine,viaregio/mezzanine,dustinrb/mezzanine,frankier/mezzanine,ryneeverett/mezzanine,damnfine/mezzanine,wrwrwr/mezzanine,sjuxax/mezzanine,geodesign/mezzanine,wbtuomela/mezzanine,biomassives/mezzanine,biomassives/mezzanine,nikolas/mezzanine,Cicero-Zhao/mezzanine,geodesign/mezzanine,wyzex/mezzanine,AlexHill/mezzanine,wbtuomela/mezzanine,christianwgd/mezzanine,mush42/mezzanine,readevalprint/mezzanine,webounty/mezzanine,batpad/mezzanine,stephenmcd/mezzanine,guibernardino/mezzanine,douglaskastle/mezzanine,spookylukey/mezzanine,emile2016/mezzanine,vladir/mezzanine,eino-makitalo/mezzanine,Cajoline/mezzanine,dekomote/mezzanine-modeltranslation-backport,damnfine/mezzanine,wyzex/mezzanine,gbosh/mezzanine,cccs-web/mezzanine,readevalprint/mezzanine,frankchin/mezzanine,gbosh/mezzanine,guibernardino/mezzanine,scarcry/snm-mezzanine
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.db.models import Q from django.utils.http import base36_to_int class MezzanineBackend(ModelBackend): """ Extends Django's ``ModelBackend`` to allow login via username, email, or verification token. Args are either ``username`` and ``password``, or ``uidb36`` and ``token``. In either case, ``is_active`` can also be given. For login, is_active is not given, so that the login form can raise a specific error for inactive users. For password reset, True is given for is_active. For signup verficiation, False is given for is_active. """ def authenticate(self, **kwargs): if kwargs: username = kwargs.pop("username", None) if username: username_or_email = Q(username=username) | Q(email=username) password = kwargs.pop("password") try: user = User.objects.get(username_or_email, **kwargs) except User.DoesNotExist: pass else: if user.check_password(password): return user else: + if 'uidb36' not in kwargs: + return kwargs["id"] = base36_to_int(kwargs.pop("uidb36")) token = kwargs.pop("token") try: user = User.objects.get(**kwargs) except User.DoesNotExist: pass else: if default_token_generator.check_token(user, token): return user
Fix kwargs usage to work with other auth backends.
## Code Before: from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.db.models import Q from django.utils.http import base36_to_int class MezzanineBackend(ModelBackend): """ Extends Django's ``ModelBackend`` to allow login via username, email, or verification token. Args are either ``username`` and ``password``, or ``uidb36`` and ``token``. In either case, ``is_active`` can also be given. For login, is_active is not given, so that the login form can raise a specific error for inactive users. For password reset, True is given for is_active. For signup verficiation, False is given for is_active. """ def authenticate(self, **kwargs): if kwargs: username = kwargs.pop("username", None) if username: username_or_email = Q(username=username) | Q(email=username) password = kwargs.pop("password") try: user = User.objects.get(username_or_email, **kwargs) except User.DoesNotExist: pass else: if user.check_password(password): return user else: kwargs["id"] = base36_to_int(kwargs.pop("uidb36")) token = kwargs.pop("token") try: user = User.objects.get(**kwargs) except User.DoesNotExist: pass else: if default_token_generator.check_token(user, token): return user ## Instruction: Fix kwargs usage to work with other auth backends. ## Code After: from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.db.models import Q from django.utils.http import base36_to_int class MezzanineBackend(ModelBackend): """ Extends Django's ``ModelBackend`` to allow login via username, email, or verification token. Args are either ``username`` and ``password``, or ``uidb36`` and ``token``. In either case, ``is_active`` can also be given. For login, is_active is not given, so that the login form can raise a specific error for inactive users. For password reset, True is given for is_active. For signup verficiation, False is given for is_active. """ def authenticate(self, **kwargs): if kwargs: username = kwargs.pop("username", None) if username: username_or_email = Q(username=username) | Q(email=username) password = kwargs.pop("password") try: user = User.objects.get(username_or_email, **kwargs) except User.DoesNotExist: pass else: if user.check_password(password): return user else: if 'uidb36' not in kwargs: return kwargs["id"] = base36_to_int(kwargs.pop("uidb36")) token = kwargs.pop("token") try: user = User.objects.get(**kwargs) except User.DoesNotExist: pass else: if default_token_generator.check_token(user, token): return user
from django.contrib.auth.backends import ModelBackend from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.db.models import Q from django.utils.http import base36_to_int class MezzanineBackend(ModelBackend): """ Extends Django's ``ModelBackend`` to allow login via username, email, or verification token. Args are either ``username`` and ``password``, or ``uidb36`` and ``token``. In either case, ``is_active`` can also be given. For login, is_active is not given, so that the login form can raise a specific error for inactive users. For password reset, True is given for is_active. For signup verficiation, False is given for is_active. """ def authenticate(self, **kwargs): if kwargs: username = kwargs.pop("username", None) if username: username_or_email = Q(username=username) | Q(email=username) password = kwargs.pop("password") try: user = User.objects.get(username_or_email, **kwargs) except User.DoesNotExist: pass else: if user.check_password(password): return user else: + if 'uidb36' not in kwargs: + return kwargs["id"] = base36_to_int(kwargs.pop("uidb36")) token = kwargs.pop("token") try: user = User.objects.get(**kwargs) except User.DoesNotExist: pass else: if default_token_generator.check_token(user, token): return user
3429a1b543208adf95e60c89477a4219a5a366a3
makesty.py
makesty.py
import re # Input file created from http://astronautweb.co/snippet/font-awesome/ INPUT_FILE = 'htmlfontawesome.txt' OUTPUT_FILE = 'fontawesome.sty' with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w: for line in r: # Expects to find 'fa-NAME' ending with " name = re.findall(r'fa-[^""]*', line)[0] # Expects to find '\fSYMBOL' ending with " symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper() camel_case = [w.capitalize() for w in name.split('-')] camel_case[0] = camel_case[0].lower() camel_name = ''.join(camel_case) name = name.lstrip('fa-') print('\expandafter\def\csname faicon@{name}\endcsname ' '{{\symbol{{"{symbol}}}}} \def\{camel_name} ' '{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name, camel_name=camel_name, symbol=symbol), file=w)
import re # Input file created from http://astronautweb.co/snippet/font-awesome/ INPUT_FILE = 'htmlfontawesome.txt' OUTPUT_FILE = 'fontawesome.sty' OUTPUT_HEADER = r''' % Identify this package. \NeedsTeXFormat{LaTeX2e} \ProvidesPackage{fontawesome}[2014/04/24 v4.0.3 font awesome icons] % Requirements to use. \usepackage{fontspec} % Define shortcut to load the Font Awesome font. \newfontfamily{\FA}{FontAwesome} % Generic command displaying an icon by its name. \newcommand*{\faicon}[1]{{ \FA\csname faicon@#1\endcsname }} ''' with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w: print(OUTPUT_HEADER, file=w) for line in r: # Expects to find 'fa-NAME' ending with " name = re.findall(r'fa-[^""]*', line)[0] # Expects to find '\fSYMBOL' ending with " symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper() camel_case = [w.capitalize() for w in name.split('-')] camel_case[0] = camel_case[0].lower() camel_name = ''.join(camel_case) name = name.lstrip('fa-') print('\expandafter\def\csname faicon@{name}\endcsname ' '{{\symbol{{"{symbol}}}}} \def\{camel_name} ' '{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name, camel_name=camel_name, symbol=symbol), file=w) print(r'\endinput', file=w)
Add header and footer sections to the .sty.
Add header and footer sections to the .sty.
Python
mit
posquit0/latex-fontawesome
import re # Input file created from http://astronautweb.co/snippet/font-awesome/ INPUT_FILE = 'htmlfontawesome.txt' OUTPUT_FILE = 'fontawesome.sty' + OUTPUT_HEADER = r''' + % Identify this package. + \NeedsTeXFormat{LaTeX2e} + \ProvidesPackage{fontawesome}[2014/04/24 v4.0.3 font awesome icons] + + % Requirements to use. + \usepackage{fontspec} + + % Define shortcut to load the Font Awesome font. + \newfontfamily{\FA}{FontAwesome} + % Generic command displaying an icon by its name. + \newcommand*{\faicon}[1]{{ + \FA\csname faicon@#1\endcsname + }} + ''' + with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w: + print(OUTPUT_HEADER, file=w) for line in r: # Expects to find 'fa-NAME' ending with " name = re.findall(r'fa-[^""]*', line)[0] # Expects to find '\fSYMBOL' ending with " symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper() camel_case = [w.capitalize() for w in name.split('-')] camel_case[0] = camel_case[0].lower() camel_name = ''.join(camel_case) name = name.lstrip('fa-') print('\expandafter\def\csname faicon@{name}\endcsname ' '{{\symbol{{"{symbol}}}}} \def\{camel_name} ' '{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name, camel_name=camel_name, symbol=symbol), file=w) + print(r'\endinput', file=w)
Add header and footer sections to the .sty.
## Code Before: import re # Input file created from http://astronautweb.co/snippet/font-awesome/ INPUT_FILE = 'htmlfontawesome.txt' OUTPUT_FILE = 'fontawesome.sty' with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w: for line in r: # Expects to find 'fa-NAME' ending with " name = re.findall(r'fa-[^""]*', line)[0] # Expects to find '\fSYMBOL' ending with " symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper() camel_case = [w.capitalize() for w in name.split('-')] camel_case[0] = camel_case[0].lower() camel_name = ''.join(camel_case) name = name.lstrip('fa-') print('\expandafter\def\csname faicon@{name}\endcsname ' '{{\symbol{{"{symbol}}}}} \def\{camel_name} ' '{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name, camel_name=camel_name, symbol=symbol), file=w) ## Instruction: Add header and footer sections to the .sty. ## Code After: import re # Input file created from http://astronautweb.co/snippet/font-awesome/ INPUT_FILE = 'htmlfontawesome.txt' OUTPUT_FILE = 'fontawesome.sty' OUTPUT_HEADER = r''' % Identify this package. \NeedsTeXFormat{LaTeX2e} \ProvidesPackage{fontawesome}[2014/04/24 v4.0.3 font awesome icons] % Requirements to use. \usepackage{fontspec} % Define shortcut to load the Font Awesome font. \newfontfamily{\FA}{FontAwesome} % Generic command displaying an icon by its name. \newcommand*{\faicon}[1]{{ \FA\csname faicon@#1\endcsname }} ''' with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w: print(OUTPUT_HEADER, file=w) for line in r: # Expects to find 'fa-NAME' ending with " name = re.findall(r'fa-[^""]*', line)[0] # Expects to find '\fSYMBOL' ending with " symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper() camel_case = [w.capitalize() for w in name.split('-')] camel_case[0] = camel_case[0].lower() camel_name = ''.join(camel_case) name = name.lstrip('fa-') print('\expandafter\def\csname faicon@{name}\endcsname ' '{{\symbol{{"{symbol}}}}} \def\{camel_name} ' '{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name, camel_name=camel_name, symbol=symbol), file=w) print(r'\endinput', file=w)
import re # Input file created from http://astronautweb.co/snippet/font-awesome/ INPUT_FILE = 'htmlfontawesome.txt' OUTPUT_FILE = 'fontawesome.sty' + OUTPUT_HEADER = r''' + % Identify this package. + \NeedsTeXFormat{LaTeX2e} + \ProvidesPackage{fontawesome}[2014/04/24 v4.0.3 font awesome icons] + + % Requirements to use. + \usepackage{fontspec} + + % Define shortcut to load the Font Awesome font. + \newfontfamily{\FA}{FontAwesome} + % Generic command displaying an icon by its name. + \newcommand*{\faicon}[1]{{ + \FA\csname faicon@#1\endcsname + }} + ''' + with open(INPUT_FILE) as r, open(OUTPUT_FILE, 'w') as w: + print(OUTPUT_HEADER, file=w) for line in r: # Expects to find 'fa-NAME' ending with " name = re.findall(r'fa-[^""]*', line)[0] # Expects to find '\fSYMBOL' ending with " symbol = re.findall(r'\\f[^"]*', line)[0][1:].upper() camel_case = [w.capitalize() for w in name.split('-')] camel_case[0] = camel_case[0].lower() camel_name = ''.join(camel_case) name = name.lstrip('fa-') print('\expandafter\def\csname faicon@{name}\endcsname ' '{{\symbol{{"{symbol}}}}} \def\{camel_name} ' '{{{{\FA\csname faicon@{name}\endcsname}}}}'.format(name=name, camel_name=camel_name, symbol=symbol), file=w) + print(r'\endinput', file=w)
89cda8553c662ac7b435516d888706e3f3193cb7
sir/__main__.py
sir/__main__.py
import argparse from .schema import SCHEMA def reindex(args): known_entities = SCHEMA.keys() if args['entities'] is not None: entities = [] for e in args['entities']: entities.extend(e.split(',')) unknown_entities = set(known_entities) - set(entities) if unknown_entities: raise ValueError("{0} are unkown entity types".format(unknown_entities)) else: entities = known_entities print(entities) def watch(args): raise NotImplementedError def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type") reindex_parser.set_defaults(func=reindex) reindex_parser.add_argument('--entities', action='append', help='The entities to reindex' ) watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue") watch_parser.set_defaults(func=watch) args = parser.parse_args() args.func(vars(args)) if __name__ == '__main__': main()
import argparse from .schema import SCHEMA def reindex(args): known_entities = SCHEMA.keys() if args['entities'] is not None: entities = [] for e in args['entities']: entities.extend(e.split(',')) unknown_entities = set(entities) - set(known_entities) if unknown_entities: raise ValueError("{0} are unkown entity types".format(unknown_entities)) else: entities = known_entities print(entities) def watch(args): raise NotImplementedError def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type") reindex_parser.set_defaults(func=reindex) reindex_parser.add_argument('--entities', action='append', help='The entities to reindex' ) watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue") watch_parser.set_defaults(func=watch) args = parser.parse_args() args.func(vars(args)) if __name__ == '__main__': main()
Fix the unknown entity type test
Fix the unknown entity type test We want to check if any user-supplied entity name is unknown, not if any of the known types are not in the user-supplied list
Python
mit
jeffweeksio/sir
import argparse from .schema import SCHEMA def reindex(args): known_entities = SCHEMA.keys() if args['entities'] is not None: entities = [] for e in args['entities']: entities.extend(e.split(',')) - unknown_entities = set(known_entities) - set(entities) + unknown_entities = set(entities) - set(known_entities) if unknown_entities: raise ValueError("{0} are unkown entity types".format(unknown_entities)) else: entities = known_entities print(entities) def watch(args): raise NotImplementedError def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type") reindex_parser.set_defaults(func=reindex) reindex_parser.add_argument('--entities', action='append', help='The entities to reindex' ) watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue") watch_parser.set_defaults(func=watch) args = parser.parse_args() args.func(vars(args)) if __name__ == '__main__': main()
Fix the unknown entity type test
## Code Before: import argparse from .schema import SCHEMA def reindex(args): known_entities = SCHEMA.keys() if args['entities'] is not None: entities = [] for e in args['entities']: entities.extend(e.split(',')) unknown_entities = set(known_entities) - set(entities) if unknown_entities: raise ValueError("{0} are unkown entity types".format(unknown_entities)) else: entities = known_entities print(entities) def watch(args): raise NotImplementedError def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type") reindex_parser.set_defaults(func=reindex) reindex_parser.add_argument('--entities', action='append', help='The entities to reindex' ) watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue") watch_parser.set_defaults(func=watch) args = parser.parse_args() args.func(vars(args)) if __name__ == '__main__': main() ## Instruction: Fix the unknown entity type test ## Code After: import argparse from .schema import SCHEMA def reindex(args): known_entities = SCHEMA.keys() if args['entities'] is not None: entities = [] for e in args['entities']: entities.extend(e.split(',')) unknown_entities = set(entities) - set(known_entities) if unknown_entities: raise ValueError("{0} are unkown entity types".format(unknown_entities)) else: entities = known_entities print(entities) def watch(args): raise NotImplementedError def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type") reindex_parser.set_defaults(func=reindex) reindex_parser.add_argument('--entities', action='append', help='The entities to reindex' ) watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue") watch_parser.set_defaults(func=watch) args = parser.parse_args() args.func(vars(args)) if __name__ == '__main__': main()
import argparse from .schema import SCHEMA def reindex(args): known_entities = SCHEMA.keys() if args['entities'] is not None: entities = [] for e in args['entities']: entities.extend(e.split(',')) - unknown_entities = set(known_entities) - set(entities) ? ------ + unknown_entities = set(entities) - set(known_entities) ? ++++++ if unknown_entities: raise ValueError("{0} are unkown entity types".format(unknown_entities)) else: entities = known_entities print(entities) def watch(args): raise NotImplementedError def main(): parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type") reindex_parser.set_defaults(func=reindex) reindex_parser.add_argument('--entities', action='append', help='The entities to reindex' ) watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue") watch_parser.set_defaults(func=watch) args = parser.parse_args() args.func(vars(args)) if __name__ == '__main__': main()
4078dcd4a35dd09c610bb5e9298a87828a0acf8e
apps/core/models.py
apps/core/models.py
from django.db import models # Create your models here.
from django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
Implement an abstract base class model
Implement an abstract base class model
Python
mit
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
from django.db import models + from django.utils.timezone import now - # Create your models here. + class DateTimeCreatedField(models.DateTimeField): + """ + DateTimeField that by default, sets editable=False, + blank=True, default=now. + """ + + def __init__(self, *args, **kwargs): + kwargs.setdefault('editable', False) + kwargs.setdefault('blank', True) + kwargs.setdefault('default', now) + super(DateTimeCreatedField, self).__init__(*args, **kwargs) + + def get_internal_type(self): + return "DateTimeField" + + def south_field_triple(self): + "Returns a suitable description of this field for South." + from south.modelsinspector import introspector + + field_class = "django.db.models.fields.DateTimeField" + args, kwargs = introspector(self) + + return (field_class, args, kwargs) + + + class DateTimeModifiedField(DateTimeCreatedField): + """ + DateTimeField that by default, sets editable=False, + blank=True, default=datetime.now. + + Sets value to now() on each save of the model. + """ + + def pre_save(self, model, add): + value = now() + setattr(model, self.attname, value) + return value + + + class BaseModel(models.Model): + """ + An abstract base class model that provides: + - date_created + - date_modified + """ + date_created = DateTimeCreatedField() + date_modified = DateTimeModifiedField() + + class Meta: + get_latest_by = 'date_modified' + ordering = ('-date_modified', '-date_created',) + abstract = True +
Implement an abstract base class model
## Code Before: from django.db import models # Create your models here. ## Instruction: Implement an abstract base class model ## Code After: from django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
from django.db import models + from django.utils.timezone import now - # Create your models here. + + class DateTimeCreatedField(models.DateTimeField): + """ + DateTimeField that by default, sets editable=False, + blank=True, default=now. + """ + + def __init__(self, *args, **kwargs): + kwargs.setdefault('editable', False) + kwargs.setdefault('blank', True) + kwargs.setdefault('default', now) + super(DateTimeCreatedField, self).__init__(*args, **kwargs) + + def get_internal_type(self): + return "DateTimeField" + + def south_field_triple(self): + "Returns a suitable description of this field for South." + from south.modelsinspector import introspector + + field_class = "django.db.models.fields.DateTimeField" + args, kwargs = introspector(self) + + return (field_class, args, kwargs) + + + class DateTimeModifiedField(DateTimeCreatedField): + """ + DateTimeField that by default, sets editable=False, + blank=True, default=datetime.now. + + Sets value to now() on each save of the model. + """ + + def pre_save(self, model, add): + value = now() + setattr(model, self.attname, value) + return value + + + class BaseModel(models.Model): + """ + An abstract base class model that provides: + - date_created + - date_modified + """ + date_created = DateTimeCreatedField() + date_modified = DateTimeModifiedField() + + class Meta: + get_latest_by = 'date_modified' + ordering = ('-date_modified', '-date_created',) + abstract = True
e525a819724f149186b5b156520afe2549e5902a
UliEngineering/Electronics/Power.py
UliEngineering/Electronics/Power.py
from UliEngineering.EngineerIO import normalize_numeric from UliEngineering.Units import Unit import numpy as np __all__ = ["current_by_power", "power_by_current_and_voltage"] def current_by_power(power="25 W", voltage="230 V") -> Unit("A"): """ Given a device's power (or RMS power) and the voltage (or RMS voltage) it runs on, compute how much current it will draw. """ power = normalize_numeric(power) voltage = normalize_numeric(voltage) return power / voltage def power_by_current_and_voltage(current="1.0 A", voltage="230 V") -> Unit("W"): """ Given a device's current (or RMS current) and the voltage (or RMS current) it runs on, compute its power """ current = normalize_numeric(current) voltage = normalize_numeric(voltage) return current * voltage
from UliEngineering.EngineerIO import normalize_numeric from UliEngineering.Units import Unit __all__ = ["current_by_power", "power_by_current_and_voltage"] def current_by_power(power="25 W", voltage="230 V") -> Unit("A"): """ Given a device's power (or RMS power) and the voltage (or RMS voltage) it runs on, compute how much current it will draw. """ power = normalize_numeric(power) voltage = normalize_numeric(voltage) return power / voltage def power_by_current_and_voltage(current="1.0 A", voltage="230 V") -> Unit("W"): """ Given a device's current (or RMS current) and the voltage (or RMS current) it runs on, compute its power """ current = normalize_numeric(current) voltage = normalize_numeric(voltage) return current * voltage
Remove unused numpy import (codacy)
Remove unused numpy import (codacy)
Python
apache-2.0
ulikoehler/UliEngineering
from UliEngineering.EngineerIO import normalize_numeric from UliEngineering.Units import Unit - import numpy as np __all__ = ["current_by_power", "power_by_current_and_voltage"] def current_by_power(power="25 W", voltage="230 V") -> Unit("A"): """ Given a device's power (or RMS power) and the voltage (or RMS voltage) it runs on, compute how much current it will draw. """ power = normalize_numeric(power) voltage = normalize_numeric(voltage) return power / voltage def power_by_current_and_voltage(current="1.0 A", voltage="230 V") -> Unit("W"): """ Given a device's current (or RMS current) and the voltage (or RMS current) it runs on, compute its power """ current = normalize_numeric(current) voltage = normalize_numeric(voltage) return current * voltage
Remove unused numpy import (codacy)
## Code Before: from UliEngineering.EngineerIO import normalize_numeric
from UliEngineering.Units import Unit
import numpy as np

__all__ = ["current_by_power", "power_by_current_and_voltage"]

def current_by_power(power="25 W", voltage="230 V") -> Unit("A"):
    """
    Given a device's power (or RMS power)
    and the voltage (or RMS voltage)
    it runs on, compute how much current it will draw.
    """
    power = normalize_numeric(power)
    voltage = normalize_numeric(voltage)
    return power / voltage

def power_by_current_and_voltage(current="1.0 A", voltage="230 V") -> Unit("W"):
    """
    Given a device's current (or RMS current)
    and the voltage (or RMS current)
    it runs on, compute its power
    """
    current = normalize_numeric(current)
    voltage = normalize_numeric(voltage)
    return current * voltage

## Instruction: Remove unused numpy import (codacy)

## Code After: from UliEngineering.EngineerIO import normalize_numeric
from UliEngineering.Units import Unit

__all__ = ["current_by_power", "power_by_current_and_voltage"]

def current_by_power(power="25 W", voltage="230 V") -> Unit("A"):
    """
    Given a device's power (or RMS power)
    and the voltage (or RMS voltage)
    it runs on, compute how much current it will draw.
    """
    power = normalize_numeric(power)
    voltage = normalize_numeric(voltage)
    return power / voltage

def power_by_current_and_voltage(current="1.0 A", voltage="230 V") -> Unit("W"):
    """
    Given a device's current (or RMS current)
    and the voltage (or RMS current)
    it runs on, compute its power
    """
    current = normalize_numeric(current)
    voltage = normalize_numeric(voltage)
    return current * voltage
from UliEngineering.EngineerIO import normalize_numeric from UliEngineering.Units import Unit - import numpy as np __all__ = ["current_by_power", "power_by_current_and_voltage"] def current_by_power(power="25 W", voltage="230 V") -> Unit("A"): """ Given a device's power (or RMS power) and the voltage (or RMS voltage) it runs on, compute how much current it will draw. """ power = normalize_numeric(power) voltage = normalize_numeric(voltage) return power / voltage def power_by_current_and_voltage(current="1.0 A", voltage="230 V") -> Unit("W"): """ Given a device's current (or RMS current) and the voltage (or RMS current) it runs on, compute its power """ current = normalize_numeric(current) voltage = normalize_numeric(voltage) return current * voltage
2e9527d00358027a3f85d3087734ab4e87441fc4
reporting_scripts/user_info.py
reporting_scripts/user_info.py
''' This module will retrieve info about students registered in the course Usage: python user_info.py ''' from collections import defaultdict from base_edx import EdXConnection from generate_csv_report import CSV connection = EdXConnection('certificates_generatedcertificate', 'auth_userprofile') collection = connection.get_access_to_collection() documents = collection['auth_userprofile'].find() result = [] for document in documents: user_id = document['user_id'] try: final_grade = collection['certificates_generatedcertificate'].find_one({'user_id' : user_id})['grade'] except: print user_id result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) output = CSV(result, ['User ID','Username', 'Final Grade', 'Gender', 'Year of Birth', 'Level of Education', 'Country', 'City'], output_file='atoc185x_user_info.csv') output.generate_csv()
''' This module will retrieve info about students registered in the course Usage: python user_info.py ''' from collections import defaultdict from base_edx import EdXConnection from generate_csv_report import CSV connection = EdXConnection('certificates_generatedcertificate', 'auth_userprofile') collection = connection.get_access_to_collection() documents = collection['auth_userprofile'].find() result = [] for document in documents: user_id = document['user_id'] try: final_grade = collection['certificates_generatedcertificate'].find_one({'user_id' : user_id})['grade'] result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) except: # Handle users with no grades pass output = CSV(result, ['User ID','Username', 'Final Grade', 'Gender', 'Year of Birth', 'Level of Education', 'Country', 'City'], output_file='atoc185x_user_info.csv') output.generate_csv()
Update to handle users with no final grade
Update to handle users with no final grade
Python
mit
andyzsf/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
''' This module will retrieve info about students registered in the course Usage: python user_info.py ''' from collections import defaultdict from base_edx import EdXConnection from generate_csv_report import CSV connection = EdXConnection('certificates_generatedcertificate', 'auth_userprofile') collection = connection.get_access_to_collection() documents = collection['auth_userprofile'].find() result = [] for document in documents: user_id = document['user_id'] try: final_grade = collection['certificates_generatedcertificate'].find_one({'user_id' : user_id})['grade'] + result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) except: - print user_id - result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) + # Handle users with no grades + pass output = CSV(result, ['User ID','Username', 'Final Grade', 'Gender', 'Year of Birth', 'Level of Education', 'Country', 'City'], output_file='atoc185x_user_info.csv') output.generate_csv()
Update to handle users with no final grade
## Code Before: ''' This module will retrieve info about students registered in the course Usage: python user_info.py ''' from collections import defaultdict from base_edx import EdXConnection from generate_csv_report import CSV connection = EdXConnection('certificates_generatedcertificate', 'auth_userprofile') collection = connection.get_access_to_collection() documents = collection['auth_userprofile'].find() result = [] for document in documents: user_id = document['user_id'] try: final_grade = collection['certificates_generatedcertificate'].find_one({'user_id' : user_id})['grade'] except: print user_id result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) output = CSV(result, ['User ID','Username', 'Final Grade', 'Gender', 'Year of Birth', 'Level of Education', 'Country', 'City'], output_file='atoc185x_user_info.csv') output.generate_csv() ## Instruction: Update to handle users with no final grade ## Code After: ''' This module will retrieve info about students registered in the course Usage: python user_info.py ''' from collections import defaultdict from base_edx import EdXConnection from generate_csv_report import CSV connection = EdXConnection('certificates_generatedcertificate', 'auth_userprofile') collection = connection.get_access_to_collection() documents = collection['auth_userprofile'].find() result = [] for document in documents: user_id = document['user_id'] try: final_grade = collection['certificates_generatedcertificate'].find_one({'user_id' : user_id})['grade'] result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) except: # Handle users with no grades pass output = CSV(result, ['User ID','Username', 'Final Grade', 'Gender', 'Year of Birth', 'Level of Education', 'Country', 'City'], output_file='atoc185x_user_info.csv') output.generate_csv()
''' This module will retrieve info about students registered in the course Usage: python user_info.py ''' from collections import defaultdict from base_edx import EdXConnection from generate_csv_report import CSV connection = EdXConnection('certificates_generatedcertificate', 'auth_userprofile') collection = connection.get_access_to_collection() documents = collection['auth_userprofile'].find() result = [] for document in documents: user_id = document['user_id'] try: final_grade = collection['certificates_generatedcertificate'].find_one({'user_id' : user_id})['grade'] + result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) except: - print user_id - result.append([user_id, document['name'], final_grade, document['gender'], document['year_of_birth'], document['level_of_education'], document['country'], document['city']]) + # Handle users with no grades + pass output = CSV(result, ['User ID','Username', 'Final Grade', 'Gender', 'Year of Birth', 'Level of Education', 'Country', 'City'], output_file='atoc185x_user_info.csv') output.generate_csv()
7fd7e2e8c9472a9dadf7d33991d11de6a68a2736
refmanage/refmanage.py
refmanage/refmanage.py
import os import argparse import fs_utils from pybtex.database.input import bibtex def main(): """ Command-line interface """ parser = argparse.ArgumentParser(description="Manage BibTeX files") parser.add_argument("-t", "--test", action="store_true", help="Test parseability of BibTeX file(s)",) parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output",) parser.add_argument("paths_args", nargs="*", default="*.bib", help="File(s) to test parseability", metavar="files") args = parser.parse_args() test(args) def test(args): """ Implement "test" command-line functionality """ paths = fs_utils.handle_files_args(*args.paths_args) bibs_paths_dict = fs_utils.import_bib_files(*paths) parseables = [] unparseables = [] for key in bibs_paths_dict.keys(): if bibs_paths_dict[key] is None: unparseables.append(key) else: parseables.append(key) print("The following files are unparseable:") for unparseable in unparseables: print("\t" + str(unparseable.resolve())) if __name__ == '__main__': main()
import os import argparse import fs_utils from pybtex.database.input import bibtex def main(): """ Command-line interface """ parser = argparse.ArgumentParser(description="Manage BibTeX files") parser.add_argument("-t", "--test", action="store_true", help="Test parseability of BibTeX file(s)",) parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output",) parser.add_argument("paths_args", nargs="*", default="*.bib", help="File(s) to test parseability", metavar="files") args = parser.parse_args() test(args) def test(args): """ Implement "test" command-line functionality """ paths = fs_utils.handle_files_args(*args.paths_args) bibs_paths_dict = fs_utils.import_bib_files(*paths) parseables = [] parseables_msg = "The following files are parseable:" unparseables = [] unparseables_msg = "The following files are unparseable:" for key in bibs_paths_dict.keys(): if bibs_paths_dict[key] is None: unparseables.append(key) unparseables_msg += "\n\t" + str(key.resolve()) else: parseables.append(key) parseables_msg += "\n\t" + str(key.resolve()) if args.verbose: print(parseables_msg) print("\r") print(unparseables_msg) if __name__ == '__main__': main()
Add functionality to print list of parseable files
Add functionality to print list of parseable files
Python
mit
jrsmith3/refmanage
import os import argparse import fs_utils from pybtex.database.input import bibtex def main(): """ Command-line interface """ parser = argparse.ArgumentParser(description="Manage BibTeX files") parser.add_argument("-t", "--test", action="store_true", help="Test parseability of BibTeX file(s)",) parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output",) parser.add_argument("paths_args", nargs="*", default="*.bib", help="File(s) to test parseability", metavar="files") args = parser.parse_args() test(args) def test(args): """ Implement "test" command-line functionality """ paths = fs_utils.handle_files_args(*args.paths_args) bibs_paths_dict = fs_utils.import_bib_files(*paths) parseables = [] + parseables_msg = "The following files are parseable:" unparseables = [] + unparseables_msg = "The following files are unparseable:" + for key in bibs_paths_dict.keys(): if bibs_paths_dict[key] is None: unparseables.append(key) + unparseables_msg += "\n\t" + str(key.resolve()) else: parseables.append(key) + parseables_msg += "\n\t" + str(key.resolve()) - print("The following files are unparseable:") - for unparseable in unparseables: - print("\t" + str(unparseable.resolve())) + if args.verbose: + print(parseables_msg) + print("\r") + print(unparseables_msg) if __name__ == '__main__': main()
Add functionality to print list of parseable files
## Code Before: import os import argparse import fs_utils from pybtex.database.input import bibtex def main(): """ Command-line interface """ parser = argparse.ArgumentParser(description="Manage BibTeX files") parser.add_argument("-t", "--test", action="store_true", help="Test parseability of BibTeX file(s)",) parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output",) parser.add_argument("paths_args", nargs="*", default="*.bib", help="File(s) to test parseability", metavar="files") args = parser.parse_args() test(args) def test(args): """ Implement "test" command-line functionality """ paths = fs_utils.handle_files_args(*args.paths_args) bibs_paths_dict = fs_utils.import_bib_files(*paths) parseables = [] unparseables = [] for key in bibs_paths_dict.keys(): if bibs_paths_dict[key] is None: unparseables.append(key) else: parseables.append(key) print("The following files are unparseable:") for unparseable in unparseables: print("\t" + str(unparseable.resolve())) if __name__ == '__main__': main() ## Instruction: Add functionality to print list of parseable files ## Code After: import os import argparse import fs_utils from pybtex.database.input import bibtex def main(): """ Command-line interface """ parser = argparse.ArgumentParser(description="Manage BibTeX files") parser.add_argument("-t", "--test", action="store_true", help="Test parseability of BibTeX file(s)",) parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output",) parser.add_argument("paths_args", nargs="*", default="*.bib", help="File(s) to test parseability", metavar="files") args = parser.parse_args() test(args) def test(args): """ Implement "test" command-line functionality """ paths = fs_utils.handle_files_args(*args.paths_args) bibs_paths_dict = fs_utils.import_bib_files(*paths) parseables = [] parseables_msg = "The following files are parseable:" unparseables = [] unparseables_msg = "The following files are unparseable:" for key in bibs_paths_dict.keys(): if bibs_paths_dict[key] is None: unparseables.append(key) unparseables_msg += "\n\t" + str(key.resolve()) else: parseables.append(key) parseables_msg += "\n\t" + str(key.resolve()) if args.verbose: print(parseables_msg) print("\r") print(unparseables_msg) if __name__ == '__main__': main()
import os import argparse import fs_utils from pybtex.database.input import bibtex def main(): """ Command-line interface """ parser = argparse.ArgumentParser(description="Manage BibTeX files") parser.add_argument("-t", "--test", action="store_true", help="Test parseability of BibTeX file(s)",) parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output",) parser.add_argument("paths_args", nargs="*", default="*.bib", help="File(s) to test parseability", metavar="files") args = parser.parse_args() test(args) def test(args): """ Implement "test" command-line functionality """ paths = fs_utils.handle_files_args(*args.paths_args) bibs_paths_dict = fs_utils.import_bib_files(*paths) parseables = [] + parseables_msg = "The following files are parseable:" unparseables = [] + unparseables_msg = "The following files are unparseable:" + for key in bibs_paths_dict.keys(): if bibs_paths_dict[key] is None: unparseables.append(key) + unparseables_msg += "\n\t" + str(key.resolve()) else: parseables.append(key) + parseables_msg += "\n\t" + str(key.resolve()) - print("The following files are unparseable:") - for unparseable in unparseables: - print("\t" + str(unparseable.resolve())) + if args.verbose: + print(parseables_msg) + print("\r") + print(unparseables_msg) if __name__ == '__main__': main()
25db9110d34760118b47b2bdf637cf6947154c2c
tests/unit/distributed/test_objectstore.py
tests/unit/distributed/test_objectstore.py
import pytest from bcbio.distributed import objectstore from bcbio.distributed.objectstore import GoogleDrive @pytest.fixture def mock_api(mocker): mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials') mocker.patch('bcbio.distributed.objectstore.Http') mocker.patch('bcbio.distributed.objectstore.build') mocker.patch('bcbio.distributed.objectstore.http') yield None def test_create_google_drive_service(mock_api): service = GoogleDrive() assert service def test_creates_http_auth(mock_api): GoogleDrive() objectstore.ServiceAccountCredentials.from_json_keyfile_name\ .assert_called_once_with( GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES) def test_api_scope_includes_google_drive(mock_api): drive_scope = 'https://www.googleapis.com/auth/drive' assert drive_scope in GoogleDrive.SCOPES
import pytest from bcbio.distributed import objectstore from bcbio.distributed.objectstore import GoogleDrive @pytest.fixture def mock_api(mocker): mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials') mocker.patch('bcbio.distributed.objectstore.Http') mocker.patch('bcbio.distributed.objectstore.build') mocker.patch('bcbio.distributed.objectstore.http') yield None def test_create_google_drive_service(mock_api): service = GoogleDrive() assert service def test_creates_http_auth(mock_api): GoogleDrive() objectstore.ServiceAccountCredentials.from_json_keyfile_name\ .assert_called_once_with( GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES) def test_api_scope_includes_google_drive(mock_api): drive_scope = 'https://www.googleapis.com/auth/drive' assert drive_scope in GoogleDrive.SCOPES def test_filename_with_json_key_is_present(mock_api): assert GoogleDrive.GOOGLE_API_KEY_FILE assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
Test json file with api key is in API service class
Test json file with api key is in API service class
Python
mit
a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen
import pytest from bcbio.distributed import objectstore from bcbio.distributed.objectstore import GoogleDrive @pytest.fixture def mock_api(mocker): mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials') mocker.patch('bcbio.distributed.objectstore.Http') mocker.patch('bcbio.distributed.objectstore.build') mocker.patch('bcbio.distributed.objectstore.http') yield None def test_create_google_drive_service(mock_api): service = GoogleDrive() assert service def test_creates_http_auth(mock_api): GoogleDrive() objectstore.ServiceAccountCredentials.from_json_keyfile_name\ .assert_called_once_with( GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES) def test_api_scope_includes_google_drive(mock_api): drive_scope = 'https://www.googleapis.com/auth/drive' assert drive_scope in GoogleDrive.SCOPES + + def test_filename_with_json_key_is_present(mock_api): + assert GoogleDrive.GOOGLE_API_KEY_FILE + assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json') +
Test json file with api key is in API service class
## Code Before: import pytest from bcbio.distributed import objectstore from bcbio.distributed.objectstore import GoogleDrive @pytest.fixture def mock_api(mocker): mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials') mocker.patch('bcbio.distributed.objectstore.Http') mocker.patch('bcbio.distributed.objectstore.build') mocker.patch('bcbio.distributed.objectstore.http') yield None def test_create_google_drive_service(mock_api): service = GoogleDrive() assert service def test_creates_http_auth(mock_api): GoogleDrive() objectstore.ServiceAccountCredentials.from_json_keyfile_name\ .assert_called_once_with( GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES) def test_api_scope_includes_google_drive(mock_api): drive_scope = 'https://www.googleapis.com/auth/drive' assert drive_scope in GoogleDrive.SCOPES ## Instruction: Test json file with api key is in API service class ## Code After: import pytest from bcbio.distributed import objectstore from bcbio.distributed.objectstore import GoogleDrive @pytest.fixture def mock_api(mocker): mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials') mocker.patch('bcbio.distributed.objectstore.Http') mocker.patch('bcbio.distributed.objectstore.build') mocker.patch('bcbio.distributed.objectstore.http') yield None def test_create_google_drive_service(mock_api): service = GoogleDrive() assert service def test_creates_http_auth(mock_api): GoogleDrive() objectstore.ServiceAccountCredentials.from_json_keyfile_name\ .assert_called_once_with( GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES) def test_api_scope_includes_google_drive(mock_api): drive_scope = 'https://www.googleapis.com/auth/drive' assert drive_scope in GoogleDrive.SCOPES def test_filename_with_json_key_is_present(mock_api): assert GoogleDrive.GOOGLE_API_KEY_FILE assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
import pytest from bcbio.distributed import objectstore from bcbio.distributed.objectstore import GoogleDrive @pytest.fixture def mock_api(mocker): mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials') mocker.patch('bcbio.distributed.objectstore.Http') mocker.patch('bcbio.distributed.objectstore.build') mocker.patch('bcbio.distributed.objectstore.http') yield None def test_create_google_drive_service(mock_api): service = GoogleDrive() assert service def test_creates_http_auth(mock_api): GoogleDrive() objectstore.ServiceAccountCredentials.from_json_keyfile_name\ .assert_called_once_with( GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES) def test_api_scope_includes_google_drive(mock_api): drive_scope = 'https://www.googleapis.com/auth/drive' assert drive_scope in GoogleDrive.SCOPES + + + def test_filename_with_json_key_is_present(mock_api): + assert GoogleDrive.GOOGLE_API_KEY_FILE + assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
01b03d46d32dd7f9e027220df0681c4f82fe7217
cumulusci/conftest.py
cumulusci/conftest.py
from pytest import fixture from cumulusci.core.github import get_github_api @fixture def gh_api(): return get_github_api("TestOwner", "TestRepo")
import os from pytest import fixture from cumulusci.core.github import get_github_api @fixture def gh_api(): return get_github_api("TestOwner", "TestRepo") @fixture(scope="class", autouse=True) def restore_cwd(): d = os.getcwd() try: yield finally: os.chdir(d)
Add pytest fixture to avoid leakage of cwd changes
Add pytest fixture to avoid leakage of cwd changes
Python
bsd-3-clause
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
+ import os + from pytest import fixture from cumulusci.core.github import get_github_api @fixture def gh_api(): return get_github_api("TestOwner", "TestRepo") + + @fixture(scope="class", autouse=True) + def restore_cwd(): + d = os.getcwd() + try: + yield + finally: + os.chdir(d) +
Add pytest fixture to avoid leakage of cwd changes
## Code Before: from pytest import fixture from cumulusci.core.github import get_github_api @fixture def gh_api(): return get_github_api("TestOwner", "TestRepo") ## Instruction: Add pytest fixture to avoid leakage of cwd changes ## Code After: import os from pytest import fixture from cumulusci.core.github import get_github_api @fixture def gh_api(): return get_github_api("TestOwner", "TestRepo") @fixture(scope="class", autouse=True) def restore_cwd(): d = os.getcwd() try: yield finally: os.chdir(d)
+ import os + from pytest import fixture from cumulusci.core.github import get_github_api @fixture def gh_api(): return get_github_api("TestOwner", "TestRepo") + + + @fixture(scope="class", autouse=True) + def restore_cwd(): + d = os.getcwd() + try: + yield + finally: + os.chdir(d)
9fc53690c8b31fa62391aeec54b29f4ee216402a
test/test_label_install.py
test/test_label_install.py
import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
Revert "Skip install labels test for now"
Revert "Skip install labels test for now" This reverts commit 3016324f9eb84989bcdefa2d3dfe1f766f4ab7e6.
Python
mit
robinedwards/neomodel,robinedwards/neomodel
- import unittest - from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() - @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE") -
Revert "Skip install labels test for now"
## Code Before: import unittest from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE") ## Instruction: Revert "Skip install labels test for now" ## Code After: from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
- import unittest - from neomodel import config, StructuredNode, StringProperty, install_all_labels from neomodel.core import db config.AUTO_INSTALL_LABELS = False class NoConstraintsSetup(StructuredNode): name = StringProperty(unique_index=True) config.AUTO_INSTALL_LABELS = True def test_labels_were_not_installed(): bob = NoConstraintsSetup(name='bob').save() bob2 = NoConstraintsSetup(name='bob').save() assert bob.id != bob2.id for n in NoConstraintsSetup.nodes.all(): n.delete() - @unittest.skip('disabled, broken in travis') def test_install_all(): # run install all labels install_all_labels() assert True # remove constraint for above test db.cypher_query("DROP CONSTRAINT on (n:NoConstraintsSetup) ASSERT n.name IS UNIQUE")
013ed651c3e8e7cfa4b8babefc2664644b928852
pybtex/bibtex/exceptions.py
pybtex/bibtex/exceptions.py
class BibTeXError(Exception): pass
from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
Make BibTeXError a subclass of PybtexError.
Make BibTeXError a subclass of PybtexError.
Python
mit
andreas-h/pybtex,chbrown/pybtex,andreas-h/pybtex,chbrown/pybtex
- class BibTeXError(Exception): + from pybtex.exceptions import PybtexError + + class BibTeXError(PybtexError): pass
Make BibTeXError a subclass of PybtexError.
## Code Before: class BibTeXError(Exception): pass ## Instruction: Make BibTeXError a subclass of PybtexError. ## Code After: from pybtex.exceptions import PybtexError class BibTeXError(PybtexError): pass
- class BibTeXError(Exception): + from pybtex.exceptions import PybtexError + + class BibTeXError(PybtexError): pass
d6432aa912f6d654f45c9bbfd27df46529816caf
rakuten/apis/travel_api.py
rakuten/apis/travel_api.py
import requests from .api_exception import RakutenApiException from .base_api import BaseApi class TravelApi(BaseApi): def __init__(self, options): super(TravelApi, self).__init__(options) def vacant_hotel_search(self, **kwargs): params = self._dict_to_camel_case(kwargs) params.update(self._default_params) url = self._make_url('/Travel/VacantHotelSearch/20131024') r = requests.get(url, params=params) if r.status_code == 200: result = r.json() hotels = [self._parse_hotel(r) for r in result['hotels']] return hotels else: raise RakutenApiException(r.status_code, r.text) def _parse_hotel(self, hotel_info): hotel = hotel_info['hotel'][0]['hotelBasicInfo'] room_infos = [r['roomInfo'][0]['roomBasicInfo'] for r in hotel_info['hotel'] if 'roomInfo' in r] hotel['room_infos'] = room_infos return hotel
import requests from .api_exception import RakutenApiException from .base_api import BaseApi class TravelApi(BaseApi): def __init__(self, options): super(TravelApi, self).__init__(options) self._default_params['datumType'] = 1 def vacant_hotel_search(self, **kwargs): params = self._dict_to_camel_case(kwargs) params.update(self._default_params) url = self._make_url('/Travel/VacantHotelSearch/20131024') r = requests.get(url, params=params) if r.status_code == 200: result = r.json() hotels = [self._parse_hotel(r) for r in result['hotels']] return hotels else: raise RakutenApiException(r.status_code, r.text) def _parse_hotel(self, hotel_info): hotel = hotel_info['hotel'][0]['hotelBasicInfo'] room_infos = [r['roomInfo'][0]['roomBasicInfo'] for r in hotel_info['hotel'] if 'roomInfo' in r] hotel['room_infos'] = room_infos return hotel
Change default format to normal longitude/latitude.
Change default format to normal longitude/latitude.
Python
mit
claudetech/python_rakuten
import requests from .api_exception import RakutenApiException from .base_api import BaseApi class TravelApi(BaseApi): def __init__(self, options): super(TravelApi, self).__init__(options) + self._default_params['datumType'] = 1 def vacant_hotel_search(self, **kwargs): params = self._dict_to_camel_case(kwargs) params.update(self._default_params) url = self._make_url('/Travel/VacantHotelSearch/20131024') r = requests.get(url, params=params) if r.status_code == 200: result = r.json() hotels = [self._parse_hotel(r) for r in result['hotels']] return hotels else: raise RakutenApiException(r.status_code, r.text) def _parse_hotel(self, hotel_info): hotel = hotel_info['hotel'][0]['hotelBasicInfo'] room_infos = [r['roomInfo'][0]['roomBasicInfo'] for r in hotel_info['hotel'] if 'roomInfo' in r] hotel['room_infos'] = room_infos return hotel
Change default format to normal longitude/latitude.
## Code Before: import requests from .api_exception import RakutenApiException from .base_api import BaseApi class TravelApi(BaseApi): def __init__(self, options): super(TravelApi, self).__init__(options) def vacant_hotel_search(self, **kwargs): params = self._dict_to_camel_case(kwargs) params.update(self._default_params) url = self._make_url('/Travel/VacantHotelSearch/20131024') r = requests.get(url, params=params) if r.status_code == 200: result = r.json() hotels = [self._parse_hotel(r) for r in result['hotels']] return hotels else: raise RakutenApiException(r.status_code, r.text) def _parse_hotel(self, hotel_info): hotel = hotel_info['hotel'][0]['hotelBasicInfo'] room_infos = [r['roomInfo'][0]['roomBasicInfo'] for r in hotel_info['hotel'] if 'roomInfo' in r] hotel['room_infos'] = room_infos return hotel ## Instruction: Change default format to normal longitude/latitude. ## Code After: import requests from .api_exception import RakutenApiException from .base_api import BaseApi class TravelApi(BaseApi): def __init__(self, options): super(TravelApi, self).__init__(options) self._default_params['datumType'] = 1 def vacant_hotel_search(self, **kwargs): params = self._dict_to_camel_case(kwargs) params.update(self._default_params) url = self._make_url('/Travel/VacantHotelSearch/20131024') r = requests.get(url, params=params) if r.status_code == 200: result = r.json() hotels = [self._parse_hotel(r) for r in result['hotels']] return hotels else: raise RakutenApiException(r.status_code, r.text) def _parse_hotel(self, hotel_info): hotel = hotel_info['hotel'][0]['hotelBasicInfo'] room_infos = [r['roomInfo'][0]['roomBasicInfo'] for r in hotel_info['hotel'] if 'roomInfo' in r] hotel['room_infos'] = room_infos return hotel
import requests from .api_exception import RakutenApiException from .base_api import BaseApi class TravelApi(BaseApi): def __init__(self, options): super(TravelApi, self).__init__(options) + self._default_params['datumType'] = 1 def vacant_hotel_search(self, **kwargs): params = self._dict_to_camel_case(kwargs) params.update(self._default_params) url = self._make_url('/Travel/VacantHotelSearch/20131024') r = requests.get(url, params=params) if r.status_code == 200: result = r.json() hotels = [self._parse_hotel(r) for r in result['hotels']] return hotels else: raise RakutenApiException(r.status_code, r.text) def _parse_hotel(self, hotel_info): hotel = hotel_info['hotel'][0]['hotelBasicInfo'] room_infos = [r['roomInfo'][0]['roomBasicInfo'] for r in hotel_info['hotel'] if 'roomInfo' in r] hotel['room_infos'] = room_infos return hotel
d4d76ae28ae8aa028c5a06f7499a20644b45b986
examples/on_startup.py
examples/on_startup.py
"""Provides an example of attaching an action on hug server startup""" import hug data = [] @hug.startup() def add_data(api): """Adds initial data to the api on startup""" data.append("It's working") @hug.startup() def add_more_data(api): """Adds initial data to the api on startup""" data.append("Even subsequent calls") @hug.get() def test(): """Returns all stored data""" return data
"""Provides an example of attaching an action on hug server startup""" import hug data = [] @hug.startup() def add_data(api): """Adds initial data to the api on startup""" data.append("It's working") @hug.startup() def add_more_data(api): """Adds initial data to the api on startup""" data.append("Even subsequent calls") @hug.cli() @hug.get() def test(): """Returns all stored data""" return data
Update example to demonstrate desired use case
Update example to demonstrate desired use case
Python
mit
MuhammadAlkarouri/hug,MuhammadAlkarouri/hug,timothycrosley/hug,MuhammadAlkarouri/hug,timothycrosley/hug,timothycrosley/hug
"""Provides an example of attaching an action on hug server startup""" import hug data = [] @hug.startup() def add_data(api): """Adds initial data to the api on startup""" data.append("It's working") @hug.startup() def add_more_data(api): """Adds initial data to the api on startup""" data.append("Even subsequent calls") + @hug.cli() @hug.get() def test(): """Returns all stored data""" return data
Update example to demonstrate desired use case
## Code Before: """Provides an example of attaching an action on hug server startup""" import hug data = [] @hug.startup() def add_data(api): """Adds initial data to the api on startup""" data.append("It's working") @hug.startup() def add_more_data(api): """Adds initial data to the api on startup""" data.append("Even subsequent calls") @hug.get() def test(): """Returns all stored data""" return data ## Instruction: Update example to demonstrate desired use case ## Code After: """Provides an example of attaching an action on hug server startup""" import hug data = [] @hug.startup() def add_data(api): """Adds initial data to the api on startup""" data.append("It's working") @hug.startup() def add_more_data(api): """Adds initial data to the api on startup""" data.append("Even subsequent calls") @hug.cli() @hug.get() def test(): """Returns all stored data""" return data
"""Provides an example of attaching an action on hug server startup""" import hug data = [] @hug.startup() def add_data(api): """Adds initial data to the api on startup""" data.append("It's working") @hug.startup() def add_more_data(api): """Adds initial data to the api on startup""" data.append("Even subsequent calls") + @hug.cli() @hug.get() def test(): """Returns all stored data""" return data
79d78e477e8cf64e7d4cd86470df3c251f6d8376
prequ/locations.py
prequ/locations.py
import os from shutil import rmtree from .click import secho from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') # NOTE # We used to store the cache dir under ~/.pip-tools, which is not the # preferred place to store caches for any platform. This has been addressed # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly # to the user when this directory is still found. LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') if os.path.exists(LEGACY_CACHE_DIR): secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') rmtree(LEGACY_CACHE_DIR)
from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ')
Remove migration code of pip-tools legacy cache
Remove migration code of pip-tools legacy cache It's not a responsibility of Prequ to remove legacy cache dir of pip-tools.
Python
bsd-2-clause
suutari-ai/prequ,suutari/prequ,suutari/prequ
- import os - from shutil import rmtree - - from .click import secho from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') - # NOTE - # We used to store the cache dir under ~/.pip-tools, which is not the - # preferred place to store caches for any platform. This has been addressed - # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly - # to the user when this directory is still found. - LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') - - if os.path.exists(LEGACY_CACHE_DIR): - secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') - rmtree(LEGACY_CACHE_DIR) -
Remove migration code of pip-tools legacy cache
## Code Before: import os from shutil import rmtree from .click import secho from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') # NOTE # We used to store the cache dir under ~/.pip-tools, which is not the # preferred place to store caches for any platform. This has been addressed # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly # to the user when this directory is still found. LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') if os.path.exists(LEGACY_CACHE_DIR): secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') rmtree(LEGACY_CACHE_DIR) ## Instruction: Remove migration code of pip-tools legacy cache ## Code After: from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ')
- import os - from shutil import rmtree - - from .click import secho from pip.utils.appdirs import user_cache_dir # The user_cache_dir helper comes straight from pip itself CACHE_DIR = user_cache_dir('prequ') - - # NOTE - # We used to store the cache dir under ~/.pip-tools, which is not the - # preferred place to store caches for any platform. This has been addressed - # in pip-tools==1.0.5, but to be good citizens, we point this out explicitly - # to the user when this directory is still found. - LEGACY_CACHE_DIR = os.path.expanduser('~/.pip-tools') - - if os.path.exists(LEGACY_CACHE_DIR): - secho('Removing old cache dir {} (new cache dir is {})'.format(LEGACY_CACHE_DIR, CACHE_DIR), fg='yellow') - rmtree(LEGACY_CACHE_DIR)
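For context on what survives this removal, a hedged illustration of where `user_cache_dir` points (paths are typical values for pip's old `appdirs` vendoring and vary by platform):

```python
# Illustrative only; pip.utils.appdirs existed in pip < 10, matching
# the era of this record.
from pip.utils.appdirs import user_cache_dir

CACHE_DIR = user_cache_dir('prequ')
# Linux:   ~/.cache/prequ  (or $XDG_CACHE_HOME/prequ)
# macOS:   ~/Library/Caches/prequ
# Windows: C:\Users\<user>\AppData\Local\prequ\Cache
print(CACHE_DIR)
```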
16cd3b501755c6d45b39b46ca8179cc0dc015125
main/admin/lan.py
main/admin/lan.py
from django.contrib import admin from django.forms import model_to_dict from django.utils.timezone import now from main.models import Lan, Event class EventInline(admin.TabularInline): model = Event show_change_link = True fields = ('name', 'url', 'start', 'end') @admin.register(Lan) class LanAdmin(admin.ModelAdmin): list_display = ('name', 'start', 'seats_count', 'is_open') fieldsets = ( ('Tider', { 'fields': (('start', 'end'), 'open', 'show_calendar') }), ('Pladser', { 'fields': ('seats',) }), ('Tekst', { 'fields': ('name', 'schedule', 'blurb') }), ('Betaling', { 'fields': ('paytypes', 'price', 'payphone') }), ('Madbestilling', { 'fields': ('food_open', 'food_phone') }), ) inlines = [ EventInline ] def get_changeform_initial_data(self, request): try: prev_lan = Lan.objects.filter(start__lt=now()).order_by("-start")[0] return model_to_dict(prev_lan, ['blurb', 'seats', 'schedule']) except (Lan.DoesNotExist, AttributeError, IndexError): return {}
from django.contrib import admin from django.forms import model_to_dict from django.utils.timezone import now from main.models import Lan, Event class EventInline(admin.TabularInline): model = Event show_change_link = True fields = ('name', 'url', 'start', 'end') @admin.register(Lan) class LanAdmin(admin.ModelAdmin): list_display = ('name', 'start', 'seats_count', 'is_open') fieldsets = ( ('Tider', { 'fields': (('start', 'end'), 'open', 'show_calendar') }), ('Pladser', { 'fields': ('seats',) }), ('Tekst', { 'fields': ('name', 'blurb') }), ('Betaling', { 'fields': ('paytypes', 'price', 'payphone') }), ('Madbestilling', { 'fields': ('food_open', 'food_phone') }), ) inlines = [ EventInline ] def get_changeform_initial_data(self, request): try: prev_lan = Lan.objects.filter(start__lt=now()).order_by("-start")[0] return model_to_dict(prev_lan, ['blurb', 'seats', 'schedule']) except (Lan.DoesNotExist, AttributeError, IndexError): return {}
Remove schedule from admin too
Remove schedule from admin too
Python
mit
bomjacob/htxaarhuslan,bomjacob/htxaarhuslan,bomjacob/htxaarhuslan
from django.contrib import admin from django.forms import model_to_dict from django.utils.timezone import now from main.models import Lan, Event class EventInline(admin.TabularInline): model = Event show_change_link = True fields = ('name', 'url', 'start', 'end') @admin.register(Lan) class LanAdmin(admin.ModelAdmin): list_display = ('name', 'start', 'seats_count', 'is_open') fieldsets = ( ('Tider', { 'fields': (('start', 'end'), 'open', 'show_calendar') }), ('Pladser', { 'fields': ('seats',) }), ('Tekst', { - 'fields': ('name', 'schedule', 'blurb') + 'fields': ('name', 'blurb') }), ('Betaling', { 'fields': ('paytypes', 'price', 'payphone') }), ('Madbestilling', { 'fields': ('food_open', 'food_phone') }), ) inlines = [ EventInline ] def get_changeform_initial_data(self, request): try: prev_lan = Lan.objects.filter(start__lt=now()).order_by("-start")[0] return model_to_dict(prev_lan, ['blurb', 'seats', 'schedule']) except (Lan.DoesNotExist, AttributeError, IndexError): return {}
Remove schedule from admin too
## Code Before: from django.contrib import admin from django.forms import model_to_dict from django.utils.timezone import now from main.models import Lan, Event class EventInline(admin.TabularInline): model = Event show_change_link = True fields = ('name', 'url', 'start', 'end') @admin.register(Lan) class LanAdmin(admin.ModelAdmin): list_display = ('name', 'start', 'seats_count', 'is_open') fieldsets = ( ('Tider', { 'fields': (('start', 'end'), 'open', 'show_calendar') }), ('Pladser', { 'fields': ('seats',) }), ('Tekst', { 'fields': ('name', 'schedule', 'blurb') }), ('Betaling', { 'fields': ('paytypes', 'price', 'payphone') }), ('Madbestilling', { 'fields': ('food_open', 'food_phone') }), ) inlines = [ EventInline ] def get_changeform_initial_data(self, request): try: prev_lan = Lan.objects.filter(start__lt=now()).order_by("-start")[0] return model_to_dict(prev_lan, ['blurb', 'seats', 'schedule']) except (Lan.DoesNotExist, AttributeError, IndexError): return {} ## Instruction: Remove schedule from admin too ## Code After: from django.contrib import admin from django.forms import model_to_dict from django.utils.timezone import now from main.models import Lan, Event class EventInline(admin.TabularInline): model = Event show_change_link = True fields = ('name', 'url', 'start', 'end') @admin.register(Lan) class LanAdmin(admin.ModelAdmin): list_display = ('name', 'start', 'seats_count', 'is_open') fieldsets = ( ('Tider', { 'fields': (('start', 'end'), 'open', 'show_calendar') }), ('Pladser', { 'fields': ('seats',) }), ('Tekst', { 'fields': ('name', 'blurb') }), ('Betaling', { 'fields': ('paytypes', 'price', 'payphone') }), ('Madbestilling', { 'fields': ('food_open', 'food_phone') }), ) inlines = [ EventInline ] def get_changeform_initial_data(self, request): try: prev_lan = Lan.objects.filter(start__lt=now()).order_by("-start")[0] return model_to_dict(prev_lan, ['blurb', 'seats', 'schedule']) except (Lan.DoesNotExist, AttributeError, IndexError): return {}
from django.contrib import admin from django.forms import model_to_dict from django.utils.timezone import now from main.models import Lan, Event class EventInline(admin.TabularInline): model = Event show_change_link = True fields = ('name', 'url', 'start', 'end') @admin.register(Lan) class LanAdmin(admin.ModelAdmin): list_display = ('name', 'start', 'seats_count', 'is_open') fieldsets = ( ('Tider', { 'fields': (('start', 'end'), 'open', 'show_calendar') }), ('Pladser', { 'fields': ('seats',) }), ('Tekst', { - 'fields': ('name', 'schedule', 'blurb') ? ------------ + 'fields': ('name', 'blurb') }), ('Betaling', { 'fields': ('paytypes', 'price', 'payphone') }), ('Madbestilling', { 'fields': ('food_open', 'food_phone') }), ) inlines = [ EventInline ] def get_changeform_initial_data(self, request): try: prev_lan = Lan.objects.filter(start__lt=now()).order_by("-start")[0] return model_to_dict(prev_lan, ['blurb', 'seats', 'schedule']) except (Lan.DoesNotExist, AttributeError, IndexError): return {}
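If the `schedule` field were also being retired from the model itself, a companion migration might look like the hypothetical sketch below — the record's `get_changeform_initial_data` still reads `schedule`, so this is speculative, and the app label and dependency are placeholders:

```python
# Hypothetical migration; only applicable once Lan.schedule is
# actually deleted from main/models.py.
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0001_initial'),  # placeholder dependency
    ]

    operations = [
        migrations.RemoveField(model_name='lan', name='schedule'),
    ]
```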
fc87264fec2b13afb04fb89bfc7b2d4bbe2debdf
src/arc_utilities/ros_helpers.py
src/arc_utilities/ros_helpers.py
import rospy from threading import Lock class Listener: def __init__(self, topic_name, topic_type, lock=None): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic lock (Lock): optional lock object used when setting stored data """ self.data = None self.lock = lock if self.lock is None: self.lock = Lock() self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) def callback(self, msg): with self.lock: self.data = msg def get(self): """ Returns the latest msg from the subscribed topic """ with self.lock: return self.data
import rospy from threading import Lock class Listener: def __init__(self, topic_name, topic_type): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic lock (Lock): optional lock object used when setting stored data """ self.data = None self.lock = Lock() self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) def callback(self, msg): with self.lock: self.data = msg def get(self): """ Returns the latest msg from the subscribed topic """ with self.lock: return self.data
Remove optional lock input (I can't see when it would be useful) Document when Listener should be used
Remove optional lock input (I can't see when it would be useful) Document when Listener should be used
Python
bsd-2-clause
WPI-ARC/arc_utilities,UM-ARM-Lab/arc_utilities,UM-ARM-Lab/arc_utilities,WPI-ARC/arc_utilities,UM-ARM-Lab/arc_utilities,WPI-ARC/arc_utilities
import rospy from threading import Lock class Listener: - def __init__(self, topic_name, topic_type, lock=None): + def __init__(self, topic_name, topic_type): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. + + Listener does not consume the message + (for consuming behavior, use the standard ros callback pattern) + Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic lock (Lock): optional lock object used when setting stored data """ self.data = None - self.lock = lock - if self.lock is None: - self.lock = Lock() + self.lock = Lock() self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) def callback(self, msg): with self.lock: self.data = msg def get(self): """ Returns the latest msg from the subscribed topic """ with self.lock: return self.data
Remove optional lock input (I can't see when it would be useful) Document when Listener should be used
## Code Before: import rospy from threading import Lock class Listener: def __init__(self, topic_name, topic_type, lock=None): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic lock (Lock): optional lock object used when setting stored data """ self.data = None self.lock = lock if self.lock is None: self.lock = Lock() self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) def callback(self, msg): with self.lock: self.data = msg def get(self): """ Returns the latest msg from the subscribed topic """ with self.lock: return self.data ## Instruction: Remove optional lock input (I can't see when it would be useful) Document when Listener should be used ## Code After: import rospy from threading import Lock class Listener: def __init__(self, topic_name, topic_type): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. Listener does not consume the message (for consuming behavior, use the standard ros callback pattern) Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic lock (Lock): optional lock object used when setting stored data """ self.data = None self.lock = Lock() self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) def callback(self, msg): with self.lock: self.data = msg def get(self): """ Returns the latest msg from the subscribed topic """ with self.lock: return self.data
import rospy from threading import Lock class Listener: - def __init__(self, topic_name, topic_type, lock=None): ? ----------- + def __init__(self, topic_name, topic_type): """ Listener is a wrapper around a subscriber where the callback simply records the latest msg. + + Listener does not consume the message + (for consuming behavior, use the standard ros callback pattern) + Listener does not check timestamps of message headers Parameters: topic_name (str): name of topic to subscribe to topic_type (msg_type): type of message received on topic lock (Lock): optional lock object used when setting stored data """ self.data = None - self.lock = lock - if self.lock is None: - self.lock = Lock() ? ---- + self.lock = Lock() self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback) def callback(self, msg): with self.lock: self.data = msg def get(self): """ Returns the latest msg from the subscribed topic """ with self.lock: return self.data
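A minimal usage sketch for the wrapper above; the node name, topic, and message type are illustrative, and the import path is inferred from the file's location under `src/arc_utilities/`:

```python
# Sketch: poll the latest message instead of consuming via a callback.
import rospy
from std_msgs.msg import String
from arc_utilities.ros_helpers import Listener

rospy.init_node('listener_demo')
chatter = Listener('/chatter', String)

rate = rospy.Rate(1)  # 1 Hz
while not rospy.is_shutdown():
    msg = chatter.get()          # None until the first message arrives
    if msg is not None:
        rospy.loginfo(msg.data)  # always the most recent value
    rate.sleep()
```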
2083c0079a70783deff54a7acd6f3ef6bba25302
tests/test_pyglmnet.py
tests/test_pyglmnet.py
import numpy as np import scipy.sparse as sps from sklearn.preprocessing import StandardScaler from numpy.testing import assert_allclose from pyglmnet import GLM def test_glmnet(): """Test glmnet.""" glm = GLM(distr='poisson') scaler = StandardScaler() n_samples, n_features = 10000, 100 density = 0.1 # coefficients beta0 = np.random.rand() beta = sps.rand(n_features, 1, density=density).toarray() X_train = np.random.normal(0.0, 1.0, [n_samples, n_features]) y_train = glm.simulate(beta0, beta, X_train) X_train = scaler.fit_transform(X_train) glm.fit(X_train, y_train) beta_ = glm.fit_params[-2]['beta'][:] assert_allclose(beta[:], beta_, atol=0.1) # check fit density_ = np.sum(beta_ > 0.1) / float(n_features) assert_allclose(density_, density, atol=0.05) # check density
import numpy as np import scipy.sparse as sps from sklearn.preprocessing import StandardScaler from numpy.testing import assert_allclose from pyglmnet import GLM def test_glmnet(): """Test glmnet.""" glm = GLM(distr='poisson') scaler = StandardScaler() n_samples, n_features = 10000, 100 density = 0.1 # coefficients beta0 = np.random.rand() beta = sps.rand(n_features, 1, density=density).toarray() X_train = np.random.normal(0.0, 1.0, [n_samples, n_features]) y_train = glm.simulate(beta0, beta, X_train) X_train = scaler.fit_transform(X_train) glm.fit(X_train, y_train) beta_ = glm.fit_[-2]['beta'][:] assert_allclose(beta[:], beta_, atol=0.1) # check fit density_ = np.sum(beta_ > 0.1) / float(n_features) assert_allclose(density_, density, atol=0.05) # check density def test_multinomial_gradient(): """Gradient of intercept params is different""" glm = GLM(distr='multinomial') X = np.array([[1,2,3], [4,5,6]]) y = np.array([1,2]) beta = np.zeros([4, 2]) grad_beta0, grad_beta = glm.grad_L2loss(beta[0], beta[1:], 0, X, y) assert grad_beta0[0] != grad_beta0[1]
Fix glmnet test and add multinomial gradient test
Fix glmnet test and add multinomial gradient test
Python
mit
the872/pyglmnet,glm-tools/pyglmnet,pavanramkumar/pyglmnet
import numpy as np import scipy.sparse as sps from sklearn.preprocessing import StandardScaler from numpy.testing import assert_allclose from pyglmnet import GLM def test_glmnet(): """Test glmnet.""" glm = GLM(distr='poisson') scaler = StandardScaler() n_samples, n_features = 10000, 100 density = 0.1 # coefficients beta0 = np.random.rand() beta = sps.rand(n_features, 1, density=density).toarray() X_train = np.random.normal(0.0, 1.0, [n_samples, n_features]) y_train = glm.simulate(beta0, beta, X_train) X_train = scaler.fit_transform(X_train) glm.fit(X_train, y_train) - beta_ = glm.fit_params[-2]['beta'][:] + beta_ = glm.fit_[-2]['beta'][:] assert_allclose(beta[:], beta_, atol=0.1) # check fit density_ = np.sum(beta_ > 0.1) / float(n_features) assert_allclose(density_, density, atol=0.05) # check density + def test_multinomial_gradient(): + """Gradient of intercept params is different""" + glm = GLM(distr='multinomial') + X = np.array([[1,2,3], [4,5,6]]) + y = np.array([1,2]) + beta = np.zeros([4, 2]) + grad_beta0, grad_beta = glm.grad_L2loss(beta[0], beta[1:], 0, X, y) + assert grad_beta0[0] != grad_beta0[1] +
Fix glmnet test and add multinomial gradient test
## Code Before: import numpy as np import scipy.sparse as sps from sklearn.preprocessing import StandardScaler from numpy.testing import assert_allclose from pyglmnet import GLM def test_glmnet(): """Test glmnet.""" glm = GLM(distr='poisson') scaler = StandardScaler() n_samples, n_features = 10000, 100 density = 0.1 # coefficients beta0 = np.random.rand() beta = sps.rand(n_features, 1, density=density).toarray() X_train = np.random.normal(0.0, 1.0, [n_samples, n_features]) y_train = glm.simulate(beta0, beta, X_train) X_train = scaler.fit_transform(X_train) glm.fit(X_train, y_train) beta_ = glm.fit_params[-2]['beta'][:] assert_allclose(beta[:], beta_, atol=0.1) # check fit density_ = np.sum(beta_ > 0.1) / float(n_features) assert_allclose(density_, density, atol=0.05) # check density ## Instruction: Fix glmnet test and add multinomial gradient test ## Code After: import numpy as np import scipy.sparse as sps from sklearn.preprocessing import StandardScaler from numpy.testing import assert_allclose from pyglmnet import GLM def test_glmnet(): """Test glmnet.""" glm = GLM(distr='poisson') scaler = StandardScaler() n_samples, n_features = 10000, 100 density = 0.1 # coefficients beta0 = np.random.rand() beta = sps.rand(n_features, 1, density=density).toarray() X_train = np.random.normal(0.0, 1.0, [n_samples, n_features]) y_train = glm.simulate(beta0, beta, X_train) X_train = scaler.fit_transform(X_train) glm.fit(X_train, y_train) beta_ = glm.fit_[-2]['beta'][:] assert_allclose(beta[:], beta_, atol=0.1) # check fit density_ = np.sum(beta_ > 0.1) / float(n_features) assert_allclose(density_, density, atol=0.05) # check density def test_multinomial_gradient(): """Gradient of intercept params is different""" glm = GLM(distr='multinomial') X = np.array([[1,2,3], [4,5,6]]) y = np.array([1,2]) beta = np.zeros([4, 2]) grad_beta0, grad_beta = glm.grad_L2loss(beta[0], beta[1:], 0, X, y) assert grad_beta0[0] != grad_beta0[1]
import numpy as np import scipy.sparse as sps from sklearn.preprocessing import StandardScaler from numpy.testing import assert_allclose from pyglmnet import GLM def test_glmnet(): """Test glmnet.""" glm = GLM(distr='poisson') scaler = StandardScaler() n_samples, n_features = 10000, 100 density = 0.1 # coefficients beta0 = np.random.rand() beta = sps.rand(n_features, 1, density=density).toarray() X_train = np.random.normal(0.0, 1.0, [n_samples, n_features]) y_train = glm.simulate(beta0, beta, X_train) X_train = scaler.fit_transform(X_train) glm.fit(X_train, y_train) - beta_ = glm.fit_params[-2]['beta'][:] ? ------ + beta_ = glm.fit_[-2]['beta'][:] assert_allclose(beta[:], beta_, atol=0.1) # check fit density_ = np.sum(beta_ > 0.1) / float(n_features) assert_allclose(density_, density, atol=0.05) # check density + + def test_multinomial_gradient(): + """Gradient of intercept params is different""" + glm = GLM(distr='multinomial') + X = np.array([[1,2,3], [4,5,6]]) + y = np.array([1,2]) + beta = np.zeros([4, 2]) + grad_beta0, grad_beta = glm.grad_L2loss(beta[0], beta[1:], 0, X, y) + assert grad_beta0[0] != grad_beta0[1]
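The new multinomial check can be replayed standalone; this sketch mirrors the test body above and assumes the `grad_L2loss(beta0, beta, reg_lambda, X, y)` signature exactly as the record uses it:

```python
# Standalone rerun of the added test: two samples, two classes,
# zero-initialized weights, no regularization.
import numpy as np
from pyglmnet import GLM

glm = GLM(distr='multinomial')
X = np.array([[1, 2, 3], [4, 5, 6]])
y = np.array([1, 2])
beta = np.zeros([4, 2])

grad_beta0, grad_beta = glm.grad_L2loss(beta[0], beta[1:], 0, X, y)
# Each class carries its own intercept, so the components must differ:
assert grad_beta0[0] != grad_beta0[1]
```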
eb453010915f6700edd1baa0febcc634deec81dc
src/viewsapp/views.py
src/viewsapp/views.py
from decorator_plus import ( require_form_methods, require_safe_methods) from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_safe_methods def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_form_methods def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
from decorator_plus import require_http_methods from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
Switch to using require_http_methods decorator.
Switch to using require_http_methods decorator.
Python
bsd-2-clause
jambonrose/djangocon2015-views,jambonrose/djangocon2015-views
+ from decorator_plus import require_http_methods - from decorator_plus import ( - require_form_methods, require_safe_methods) from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel - @require_safe_methods + @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) - @require_form_methods + @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
Switch to using require_http_methods decorator.
## Code Before: from decorator_plus import ( require_form_methods, require_safe_methods) from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_safe_methods def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_form_methods def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form}) ## Instruction: Switch to using require_http_methods decorator. ## Code After: from decorator_plus import require_http_methods from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
+ from decorator_plus import require_http_methods - from decorator_plus import ( - require_form_methods, require_safe_methods) from django.shortcuts import ( get_object_or_404, redirect, render) from .forms import ExampleForm from .models import ExampleModel - @require_safe_methods + @require_http_methods(['GET']) def model_detail(request, *args, **kwargs): request_slug = kwargs.get('slug') example_obj = get_object_or_404( ExampleModel, slug=request_slug) return render( request, 'viewsapp/detail.html', {'object': example_obj}) - @require_form_methods + @require_http_methods(['GET', 'POST']) def model_create(request, *args, **kwargs): if request.method == 'POST': form = ExampleForm(request.POST) if form.is_valid(): new_obj = form.save() return redirect(new_obj) else: form = ExampleForm() return render( request, 'viewsapp/form.html', {'form': form})
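A hypothetical URLconf for wiring the two views above; the patterns and names are assumptions, styled for the Django 1.x era this code targets:

```python
# Sketch: requests with a disallowed verb now get a 405 from the
# decorators rather than reaching the view bodies.
from django.conf.urls import url

from viewsapp import views

urlpatterns = [
    url(r'^create/$', views.model_create, name='model_create'),
    url(r'^(?P<slug>[\w-]+)/$', views.model_detail, name='model_detail'),
]
```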
e50fdd79a49adce75559ea07024d056b6b386761
docs/config/all.py
docs/config/all.py
from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' is_prerelease = True hide_page_contents = ('search', '404', 'contents')
from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' hide_page_contents = ('search', '404', 'contents')
Remove pre-release flag as 2.x is mainline now
Remove pre-release flag as 2.x is mainline now
Python
mit
cakephp/chronos
from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' - is_prerelease = True hide_page_contents = ('search', '404', 'contents')
Remove pre-release flag as 2.x is mainline now
## Code Before: from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' is_prerelease = True hide_page_contents = ('search', '404', 'contents') ## Instruction: Remove pre-release flag as 2.x is mainline now ## Code After: from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' hide_page_contents = ('search', '404', 'contents')
from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' - is_prerelease = True hide_page_contents = ('search', '404', 'contents')
03d8a4e20ee4b6fd49495b7b047ea78d0b9a5bb4
dmoj/graders/base.py
dmoj/graders/base.py
class BaseGrader(object): def __init__(self, judge, problem, language, source): self.source = source self.language = language self.problem = problem self.judge = judge self.binary = self._generate_binary() self._terminate_grading = False self._current_proc = None def grade(self, case): raise NotImplementedError def _generate_binary(self): raise NotImplementedError def terminate_grading(self): self._terminate_grading = True if self._current_proc: try: self._current_proc.kill() except OSError: pass pass
class BaseGrader(object): def __init__(self, judge, problem, language, source): if isinstance(source, unicode): source = source.encode('utf-8') self.source = source self.language = language self.problem = problem self.judge = judge self.binary = self._generate_binary() self._terminate_grading = False self._current_proc = None def grade(self, case): raise NotImplementedError def _generate_binary(self): raise NotImplementedError def terminate_grading(self): self._terminate_grading = True if self._current_proc: try: self._current_proc.kill() except OSError: pass pass
Make source utf-8 encoded bytes.
Make source utf-8 encoded bytes.
Python
agpl-3.0
DMOJ/judge,DMOJ/judge,DMOJ/judge
class BaseGrader(object): def __init__(self, judge, problem, language, source): + if isinstance(source, unicode): + source = source.encode('utf-8') self.source = source self.language = language self.problem = problem self.judge = judge self.binary = self._generate_binary() self._terminate_grading = False self._current_proc = None def grade(self, case): raise NotImplementedError def _generate_binary(self): raise NotImplementedError def terminate_grading(self): self._terminate_grading = True if self._current_proc: try: self._current_proc.kill() except OSError: pass pass
Make source utf-8 encoded bytes.
## Code Before: class BaseGrader(object): def __init__(self, judge, problem, language, source): self.source = source self.language = language self.problem = problem self.judge = judge self.binary = self._generate_binary() self._terminate_grading = False self._current_proc = None def grade(self, case): raise NotImplementedError def _generate_binary(self): raise NotImplementedError def terminate_grading(self): self._terminate_grading = True if self._current_proc: try: self._current_proc.kill() except OSError: pass pass ## Instruction: Make source utf-8 encoded bytes. ## Code After: class BaseGrader(object): def __init__(self, judge, problem, language, source): if isinstance(source, unicode): source = source.encode('utf-8') self.source = source self.language = language self.problem = problem self.judge = judge self.binary = self._generate_binary() self._terminate_grading = False self._current_proc = None def grade(self, case): raise NotImplementedError def _generate_binary(self): raise NotImplementedError def terminate_grading(self): self._terminate_grading = True if self._current_proc: try: self._current_proc.kill() except OSError: pass pass
class BaseGrader(object): def __init__(self, judge, problem, language, source): + if isinstance(source, unicode): + source = source.encode('utf-8') self.source = source self.language = language self.problem = problem self.judge = judge self.binary = self._generate_binary() self._terminate_grading = False self._current_proc = None def grade(self, case): raise NotImplementedError def _generate_binary(self): raise NotImplementedError def terminate_grading(self): self._terminate_grading = True if self._current_proc: try: self._current_proc.kill() except OSError: pass pass
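A small Python 2 illustration of why the added guard matters — the `unicode` builtin in the record pins this code to Python 2, where unguarded unicode can hit implicit ASCII encoding downstream; the submission content below is hypothetical:

```python
# -*- coding: utf-8 -*-
# Python 2 sketch: writing unicode with non-ASCII characters to a
# binary sink raises UnicodeEncodeError unless it is encoded first,
# exactly as BaseGrader.__init__ now does.
source = u'print("caf\u00e9")'

if isinstance(source, unicode):
    source = source.encode('utf-8')

assert isinstance(source, str)  # raw utf-8 bytes now
with open('submission.py', 'wb') as f:
    f.write(source)
```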
0722b517f5b5b9a84b7521b6b7d350cbc6537948
src/core/models.py
src/core/models.py
from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. """ presumed_type = super().db_type(connection) if presumed_type == 'integer': return 'bigint' return presumed_type
from django.apps import apps from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. Django's AutoField is actually an IntegerField (SQL integer field), but in some cases we are using bigint on PostgreSQL without Django knowing it. So we continue to trick Django here, swapping its field type detection, and just tells it to use bigint. :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type
Add some explanation on BigForeignKey
Add some explanation on BigForeignKey
Python
mit
uranusjr/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,uranusjr/pycontw2016
+ from django.apps import apps from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. + + Django's AutoField is actually an IntegerField (SQL integer field), + but in some cases we are using bigint on PostgreSQL without Django + knowing it. So we continue to trick Django here, swapping its field + type detection, and just tells it to use bigint. + + :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) - if presumed_type == 'integer': + if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type
Add some explanation on BigForeignKey
## Code Before: from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. """ presumed_type = super().db_type(connection) if presumed_type == 'integer': return 'bigint' return presumed_type ## Instruction: Add some explanation on BigForeignKey ## Code After: from django.apps import apps from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. Django's AutoField is actually an IntegerField (SQL integer field), but in some cases we are using bigint on PostgreSQL without Django knowing it. So we continue to trick Django here, swapping its field type detection, and just tells it to use bigint. :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type
+ from django.apps import apps from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. + + Django's AutoField is actually an IntegerField (SQL integer field), + but in some cases we are using bigint on PostgreSQL without Django + knowing it. So we continue to trick Django here, swapping its field + type detection, and just tells it to use bigint. + + :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) - if presumed_type == 'integer': + if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type
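A hedged usage sketch for the field above — both models are hypothetical, and the bigint primary key on the target table is assumed to come from this repo's `postgres` app migrations rather than from Django itself:

```python
# Sketch: BigForeignKey emits a bigint FK column so it can reference
# a target whose integer PK was widened to bigint behind Django's back.
from django.db import models

from core.models import BigForeignKey  # path per this record's file


class Talk(models.Model):            # hypothetical target model
    title = models.CharField(max_length=100)


class Review(models.Model):          # hypothetical referencing model
    talk = BigForeignKey(Talk, on_delete=models.CASCADE)
    score = models.IntegerField()
```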