| Column | Feature type | Value range |
|---|---|---|
| commit | stringlengths | 40–40 |
| old_file | stringlengths | 4–234 |
| new_file | stringlengths | 4–234 |
| old_contents | stringlengths | 10–3.01k |
| new_contents | stringlengths | 19–3.38k |
| subject | stringlengths | 16–736 |
| message | stringlengths | 17–2.63k |
| lang | stringclasses | 4 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 5–82.6k |
| config | stringclasses | 4 values |
| content | stringlengths | 134–4.41k |
| fuzzy_diff | stringlengths | 29–3.44k |
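Each preview row below carries one value per column, in the order listed above; the composite `content` field concatenates `old_contents`, the commit `message`, and `new_contents` under `## Code Before:` / `## Instruction:` / `## Code After:` markers. As a minimal sketch of working with this schema (the dataset id below is a placeholder, not the real repository name), a record could be loaded and split apart roughly like this:

```python
from datasets import load_dataset

# Placeholder id -- substitute the actual dataset repository.
ds = load_dataset("your-org/commit-edit-corpus", split="train", streaming=True)
record = next(iter(ds))

print(record["commit"])                           # 40-character commit hash
print(record["old_file"], "->", record["new_file"])
print(record["lang"], record["license"])          # e.g. "python", "bsd-3-clause"

# Split the composite "content" field back into its three sections.
before, rest = record["content"].split("## Instruction:", 1)
instruction, after = rest.split("## Code After:", 1)
print(instruction.strip())                        # the commit message / edit instruction
```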
1eedac5229e5a9128c4fbc09f7d7b97a3859e9b9
django_sse/views.py
django_sse/views.py
from django.views.generic import View from django.views.decorators.csrf import csrf_exempt from django.http import HttpResponse try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError
from django.views.generic import View from django.views.decorators.csrf import csrf_exempt try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError
Remove duplicate import. (Thanks to MechanisM)
Remove duplicate import. (Thanks to MechanisM)
Python
bsd-3-clause
chadmiller/django-sse,niwinz/django-sse,chadmiller/django-sse
python
## Code Before: from django.views.generic import View from django.views.decorators.csrf import csrf_exempt from django.http import HttpResponse try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError ## Instruction: Remove duplicate import. (Thanks to MechanisM) ## Code After: from django.views.generic import View from django.views.decorators.csrf import csrf_exempt try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError
# ... existing code ... from django.views.generic import View from django.views.decorators.csrf import csrf_exempt try: from django.http import StreamingHttpResponse as HttpResponse # ... rest of the code ...
7211ecb704a52f7dfe0984b1bb70305367f5104c
tools/gyp_dart.py
tools/gyp_dart.py
import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): component = 'all' if len(sys.argv) == 2: component = sys.argv[1] component_gyp_files = { 'all' : 'dart/dart.gyp', 'runtime' : 'dart/runtime/dart-runtime.gyp', } args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py', '--depth=dart', '-Idart/tools/gyp/all.gypi', component_gyp_files[component]] if sys.platform == 'win32': # Generate Visual Studio 2010 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2010']) sys.exit(execute(args)) if __name__ == '__main__': main()
import os import subprocess import sys SCRIPT_DIR = os.path.dirname(sys.argv[0]) DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..')) def execute(args): process = subprocess.Popen(args, cwd=DART_ROOT) process.wait() return process.returncode def main(): component = 'all' if len(sys.argv) == 2: component = sys.argv[1] component_gyp_files = { 'all' : 'dart.gyp', 'runtime' : 'runtime/dart-runtime.gyp', } args = ['python', '-S', 'third_party/gyp/gyp_main.py', '--depth=.', '-Itools/gyp/all.gypi', component_gyp_files[component]] if sys.platform == 'win32': # Generate Visual Studio 2010 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2010']) sys.exit(execute(args)) if __name__ == '__main__': main()
Make tools/gypdart independent of the directory from which it is called.
Make tools/gypdart independent of the directory from which it is called. This enables more independence in the way you can structure your checkout using gclient. BUG= [email protected] Review URL: https://codereview.chromium.org//1023893003 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5
Python
bsd-3-clause
dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk
python
## Code Before: import os import subprocess import sys def execute(args): process = subprocess.Popen(args) process.wait() return process.returncode def main(): component = 'all' if len(sys.argv) == 2: component = sys.argv[1] component_gyp_files = { 'all' : 'dart/dart.gyp', 'runtime' : 'dart/runtime/dart-runtime.gyp', } args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py', '--depth=dart', '-Idart/tools/gyp/all.gypi', component_gyp_files[component]] if sys.platform == 'win32': # Generate Visual Studio 2010 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2010']) sys.exit(execute(args)) if __name__ == '__main__': main() ## Instruction: Make tools/gypdart independent of the directory from which it is called. This enables more independence in the way you can structure your checkout using gclient. BUG= [email protected] Review URL: https://codereview.chromium.org//1023893003 git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5 ## Code After: import os import subprocess import sys SCRIPT_DIR = os.path.dirname(sys.argv[0]) DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..')) def execute(args): process = subprocess.Popen(args, cwd=DART_ROOT) process.wait() return process.returncode def main(): component = 'all' if len(sys.argv) == 2: component = sys.argv[1] component_gyp_files = { 'all' : 'dart.gyp', 'runtime' : 'runtime/dart-runtime.gyp', } args = ['python', '-S', 'third_party/gyp/gyp_main.py', '--depth=.', '-Itools/gyp/all.gypi', component_gyp_files[component]] if sys.platform == 'win32': # Generate Visual Studio 2010 compatible files by default. if not os.environ.get('GYP_MSVS_VERSION'): args.extend(['-G', 'msvs_version=2010']) sys.exit(execute(args)) if __name__ == '__main__': main()
... import subprocess import sys SCRIPT_DIR = os.path.dirname(sys.argv[0]) DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..')) def execute(args): process = subprocess.Popen(args, cwd=DART_ROOT) process.wait() return process.returncode ... component = sys.argv[1] component_gyp_files = { 'all' : 'dart.gyp', 'runtime' : 'runtime/dart-runtime.gyp', } args = ['python', '-S', 'third_party/gyp/gyp_main.py', '--depth=.', '-Itools/gyp/all.gypi', component_gyp_files[component]] if sys.platform == 'win32': ...
e4427016abdc7ef146cd7550f2ac1dace07be442
plinky.py
plinky.py
from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == "__main__": app.run(debug=True)
from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == "__main__": app.run()
Remove debug flag from app
Remove debug flag from app
Python
mit
RaspberryPiFoundation/plinky,CodeClub/plinky,codecleaner/plinky,codecleaner/plinky,CodeClub/plinky,martinpeck/plinky,martinpeck/plinky,RaspberryPiFoundation/plinky,RaspberryPiFoundation/plinky
python
## Code Before: from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == "__main__": app.run(debug=True) ## Instruction: Remove debug flag from app ## Code After: from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == "__main__": app.run()
// ... existing code ... return "Hello World!" if __name__ == "__main__": app.run() // ... rest of the code ...
2c02816c05f3863ef76b3a412ac5bad9eecfafdd
testrepository/tests/test_setup.py
testrepository/tests/test_setup.py
"""Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS))
"""Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
Make setup.py smoke test more specific again as requested in review
Make setup.py smoke test more specific again as requested in review
Python
apache-2.0
masayukig/stestr,masayukig/stestr,mtreinish/stestr,mtreinish/stestr
python
## Code Before: """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("...running bdist...", doctest.ELLIPSIS)) ## Instruction: Make setup.py smoke test more specific again as requested in review ## Code After: """Tests for setup.py.""" import doctest import os import subprocess import sys from testtools import ( TestCase, ) from testtools.matchers import ( DocTestMatches, ) class TestCanSetup(TestCase): def test_bdist(self): # Single smoke test to make sure we can build a package. path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS))
# ... existing code ... path = os.path.join(os.path.dirname(__file__), '..', '..', 'setup.py') proc = subprocess.Popen([sys.executable, path, 'bdist'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) output, _ = proc.communicate() self.assertEqual(0, proc.returncode) self.assertThat(output, DocTestMatches("""... running install_scripts ... adding '...testr' ...""", doctest.ELLIPSIS)) # ... rest of the code ...
baacda228682a50acc5a4528d43f5d3a88c7c6ec
salt/client/netapi.py
salt/client/netapi.py
''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) multiprocessing.Process(target=netapi[fun]).start()
''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing import signal # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.processes = [] def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) p = multiprocessing.Process(target=netapi[fun]) p.start() self.processes.append(p) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) def kill_children(self, *args): ''' Kill all of the children ''' for p in self.processes: p.terminate() p.join()
Make sure to not leave hanging children processes if the parent is killed
Make sure to not leave hanging children processes if the parent is killed
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
python
## Code Before: ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) multiprocessing.Process(target=netapi[fun]).start() ## Instruction: Make sure to not leave hanging children processes if the parent is killed ## Code After: ''' The main entry point for salt-api ''' # Import python libs import logging import multiprocessing import signal # Import salt-api libs import salt.loader logger = logging.getLogger(__name__) class NetapiClient(object): ''' Start each netapi module that is configured to run ''' def __init__(self, opts): self.opts = opts self.processes = [] def run(self): ''' Load and start all available api modules ''' netapi = salt.loader.netapi(self.opts) for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) p = multiprocessing.Process(target=netapi[fun]) p.start() self.processes.append(p) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) def kill_children(self, *args): ''' Kill all of the children ''' for p in self.processes: p.terminate() p.join()
// ... existing code ... # Import python libs import logging import multiprocessing import signal # Import salt-api libs import salt.loader // ... modified code ... ''' def __init__(self, opts): self.opts = opts self.processes = [] def run(self): ''' ... for fun in netapi: if fun.endswith('.start'): logger.info("Starting '{0}' api module".format(fun)) p = multiprocessing.Process(target=netapi[fun]) p.start() self.processes.append(p) # make sure to kill the subprocesses if the parent is killed signal.signal(signal.SIGTERM, self.kill_children) def kill_children(self, *args): ''' Kill all of the children ''' for p in self.processes: p.terminate() p.join() // ... rest of the code ...
2ad4dd2fe877248b33aefa4465352710f95d953a
djlotrek/decorators.py
djlotrek/decorators.py
from functools import wraps from django.conf import settings import requests def check_recaptcha(view_func): @wraps(view_func) def _wrapped_view(request, *args, **kwargs): request.recaptcha_is_valid = None if request.method == 'POST': recaptcha_response = request.POST.get('g-recaptcha-response') data = { 'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY, 'response': recaptcha_response } r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data) result = r.json() if result['success']: request.recaptcha_is_valid = True else: request.recaptcha_is_valid = False print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes'])) return view_func(request, *args, **kwargs) return _wrapped_view
from functools import wraps from django.conf import settings import requests def check_recaptcha(view_func): """Chech that the entered recaptcha data is correct""" @wraps(view_func) def _wrapped_view(request, *args, **kwargs): request.recaptcha_is_valid = None if request.method == 'POST': recaptcha_response = request.POST.get('g-recaptcha-response') data = { 'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY, 'response': recaptcha_response } r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data) result = r.json() if result['success']: request.recaptcha_is_valid = True else: request.recaptcha_is_valid = False print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes'])) return view_func(request, *args, **kwargs) return _wrapped_view
Add docstring to recaptcha check
Add docstring to recaptcha check
Python
mit
lotrekagency/djlotrek,lotrekagency/djlotrek
python
## Code Before: from functools import wraps from django.conf import settings import requests def check_recaptcha(view_func): @wraps(view_func) def _wrapped_view(request, *args, **kwargs): request.recaptcha_is_valid = None if request.method == 'POST': recaptcha_response = request.POST.get('g-recaptcha-response') data = { 'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY, 'response': recaptcha_response } r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data) result = r.json() if result['success']: request.recaptcha_is_valid = True else: request.recaptcha_is_valid = False print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes'])) return view_func(request, *args, **kwargs) return _wrapped_view ## Instruction: Add docstring to recaptcha check ## Code After: from functools import wraps from django.conf import settings import requests def check_recaptcha(view_func): """Chech that the entered recaptcha data is correct""" @wraps(view_func) def _wrapped_view(request, *args, **kwargs): request.recaptcha_is_valid = None if request.method == 'POST': recaptcha_response = request.POST.get('g-recaptcha-response') data = { 'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY, 'response': recaptcha_response } r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data) result = r.json() if result['success']: request.recaptcha_is_valid = True else: request.recaptcha_is_valid = False print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes'])) return view_func(request, *args, **kwargs) return _wrapped_view
... def check_recaptcha(view_func): """Chech that the entered recaptcha data is correct""" @wraps(view_func) def _wrapped_view(request, *args, **kwargs): request.recaptcha_is_valid = None ...
a6aae0781d11ef768b27bd30a443f0e396a1f122
org.eclipse.january/src/org/eclipse/january/dataset/DataListenerDelegate.java
org.eclipse.january/src/org/eclipse/january/dataset/DataListenerDelegate.java
/*- * Copyright 2015, 2016 Diamond Light Source Ltd. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.january.dataset; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; /** * Class used by DynamicDataset to delegate */ public class DataListenerDelegate { private List<IDataListener> listeners; public DataListenerDelegate() { listeners = Collections.synchronizedList(new ArrayList<IDataListener>()); } public void addDataListener(IDataListener l) { synchronized (listeners) { if (!listeners.contains(l)) { listeners.add(l); } } } public void removeDataListener(IDataListener l) { listeners.remove(l); } public void fire(DataEvent evt) { synchronized (listeners) { for (Iterator<IDataListener> iterator = listeners.iterator(); iterator.hasNext();) { iterator.next().dataChangePerformed(evt); } } } public boolean hasDataListeners() { return listeners.size() > 0; } public void clear() { listeners.clear(); } }
/*- * Copyright 2015, 2016 Diamond Light Source Ltd. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.january.dataset; import java.util.Collections; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** * Class used by DynamicDataset to delegate */ public class DataListenerDelegate { private Set<IDataListener> listeners; public DataListenerDelegate() { listeners = Collections.newSetFromMap(new ConcurrentHashMap<IDataListener, Boolean>()); } public void addDataListener(IDataListener l) { listeners.add(l); } public void removeDataListener(IDataListener l) { listeners.remove(l); } public void fire(DataEvent evt) { for (IDataListener listener : listeners) { listener.dataChangePerformed(evt); } } public boolean hasDataListeners() { return listeners.size() > 0; } public void clear() { listeners.clear(); } }
Improve thread safety around adding and removing IDataListener's
Improve thread safety around adding and removing IDataListener's Previously deadlock was possible if a listener was running in the same thread as a call attempting to add or remove listeners. As might be commonly the case for the UI thread. Also replaced the listeners List with a Set to prevent duplicate listeners being added. Using a set backed by a ConcurrentHashMap should allow all operation to complete safely without requiring external synchronized, this allows the methods in the class to be simplified. Signed-off-by: James Mudd <[email protected]>
Java
epl-1.0
jamesmudd/dawnsci,jamesmudd/dawnsci,xen-0/dawnsci,jamesmudd/dawnsci,DawnScience/dawnsci,eclipse/dawnsci,belkassaby/dawnsci
java
## Code Before: /*- * Copyright 2015, 2016 Diamond Light Source Ltd. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.january.dataset; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; /** * Class used by DynamicDataset to delegate */ public class DataListenerDelegate { private List<IDataListener> listeners; public DataListenerDelegate() { listeners = Collections.synchronizedList(new ArrayList<IDataListener>()); } public void addDataListener(IDataListener l) { synchronized (listeners) { if (!listeners.contains(l)) { listeners.add(l); } } } public void removeDataListener(IDataListener l) { listeners.remove(l); } public void fire(DataEvent evt) { synchronized (listeners) { for (Iterator<IDataListener> iterator = listeners.iterator(); iterator.hasNext();) { iterator.next().dataChangePerformed(evt); } } } public boolean hasDataListeners() { return listeners.size() > 0; } public void clear() { listeners.clear(); } } ## Instruction: Improve thread safety around adding and removing IDataListener's Previously deadlock was possible if a listener was running in the same thread as a call attempting to add or remove listeners. As might be commonly the case for the UI thread. Also replaced the listeners List with a Set to prevent duplicate listeners being added. Using a set backed by a ConcurrentHashMap should allow all operation to complete safely without requiring external synchronized, this allows the methods in the class to be simplified. Signed-off-by: James Mudd <[email protected]> ## Code After: /*- * Copyright 2015, 2016 Diamond Light Source Ltd. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.january.dataset; import java.util.Collections; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** * Class used by DynamicDataset to delegate */ public class DataListenerDelegate { private Set<IDataListener> listeners; public DataListenerDelegate() { listeners = Collections.newSetFromMap(new ConcurrentHashMap<IDataListener, Boolean>()); } public void addDataListener(IDataListener l) { listeners.add(l); } public void removeDataListener(IDataListener l) { listeners.remove(l); } public void fire(DataEvent evt) { for (IDataListener listener : listeners) { listener.dataChangePerformed(evt); } } public boolean hasDataListeners() { return listeners.size() > 0; } public void clear() { listeners.clear(); } }
// ... existing code ... package org.eclipse.january.dataset; import java.util.Collections; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; /** * Class used by DynamicDataset to delegate // ... modified code ... */ public class DataListenerDelegate { private Set<IDataListener> listeners; public DataListenerDelegate() { listeners = Collections.newSetFromMap(new ConcurrentHashMap<IDataListener, Boolean>()); } public void addDataListener(IDataListener l) { listeners.add(l); } public void removeDataListener(IDataListener l) { ... } public void fire(DataEvent evt) { for (IDataListener listener : listeners) { listener.dataChangePerformed(evt); } } ... public void clear() { listeners.clear(); } } // ... rest of the code ...
b624552af638652147ca8b5e49ca109a4723dca1
MoMMI/Modules/development.py
MoMMI/Modules/development.py
from discord import Message from typing import re as typing_re from MoMMI.commands import command from MoMMI.master import master from MoMMI.server import MChannel @command("reload", "reload", roles=["owner"]) async def reload(channel: MChannel, match: typing_re.Match, message: Message): await master.reload_modules() @command("modules", "modules", roles=["owner"]) async def modules(channel: MChannel, match: typing_re.Match, message: Message): msg = "```" for module in channel.server.master.modules.values(): msg += f"{module.name}:\n" for handler in module.handlers.values(): msg += f"* {handler.name} ({type(handler)})\n" msg += "```" await channel.send(msg)
from discord import Message from typing import re as typing_re from MoMMI.commands import command from MoMMI.master import master from MoMMI.server import MChannel from MoMMI.role import MRoleType @command("reload", "reload", roles=[MRoleType.OWNER]) async def reload(channel: MChannel, match: typing_re.Match, message: Message): await master.reload_modules() @command("modules", "modules", roles=[MRoleType.OWNER]) async def modules(channel: MChannel, match: typing_re.Match, message: Message): msg = "```" for module in channel.server.master.modules.values(): msg += f"{module.name}:\n" for handler in module.handlers.values(): msg += f"* {handler.name} ({type(handler)})\n" msg += "```" await channel.send(msg)
Fix dev commands using string roles.
Fix dev commands using string roles.
Python
mit
PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI
python
## Code Before: from discord import Message from typing import re as typing_re from MoMMI.commands import command from MoMMI.master import master from MoMMI.server import MChannel @command("reload", "reload", roles=["owner"]) async def reload(channel: MChannel, match: typing_re.Match, message: Message): await master.reload_modules() @command("modules", "modules", roles=["owner"]) async def modules(channel: MChannel, match: typing_re.Match, message: Message): msg = "```" for module in channel.server.master.modules.values(): msg += f"{module.name}:\n" for handler in module.handlers.values(): msg += f"* {handler.name} ({type(handler)})\n" msg += "```" await channel.send(msg) ## Instruction: Fix dev commands using string roles. ## Code After: from discord import Message from typing import re as typing_re from MoMMI.commands import command from MoMMI.master import master from MoMMI.server import MChannel from MoMMI.role import MRoleType @command("reload", "reload", roles=[MRoleType.OWNER]) async def reload(channel: MChannel, match: typing_re.Match, message: Message): await master.reload_modules() @command("modules", "modules", roles=[MRoleType.OWNER]) async def modules(channel: MChannel, match: typing_re.Match, message: Message): msg = "```" for module in channel.server.master.modules.values(): msg += f"{module.name}:\n" for handler in module.handlers.values(): msg += f"* {handler.name} ({type(handler)})\n" msg += "```" await channel.send(msg)
// ... existing code ... from MoMMI.commands import command from MoMMI.master import master from MoMMI.server import MChannel from MoMMI.role import MRoleType @command("reload", "reload", roles=[MRoleType.OWNER]) async def reload(channel: MChannel, match: typing_re.Match, message: Message): await master.reload_modules() @command("modules", "modules", roles=[MRoleType.OWNER]) async def modules(channel: MChannel, match: typing_re.Match, message: Message): msg = "```" for module in channel.server.master.modules.values(): // ... rest of the code ...
aa38ccc9bd4b1dbd1ebaa5a85ad964b0b98a215b
src/com/rainbof/nyxtools/NyxTools.java
src/com/rainbof/nyxtools/NyxTools.java
package com.rainbof.nyxtools; import android.content.Context; public class NyxTools { private volatile static NyxTools mInstance; private NyxToolsPersistence persistence; public static NyxTools getInstance(Context _context) { if (mInstance == null) { synchronized (NyxTools.class) { mInstance = new NyxTools(_context); } } return mInstance; } protected NyxTools(Context _context) { if (_context.getApplicationContext() != null) _context = _context.getApplicationContext(); persistence = new NyxToolsPersistence(_context); } public boolean setUsername(String username) { return persistence.setUsername(username); } public String getUsername() { return persistence.getUsername(); } public boolean setAuthToken(String authToken) { return persistence.setAuthToken(authToken); } public String getAuthToken() { return persistence.getAuthToken(); } public String getUserAgentPrefix(){ return persistence.getUserAgentPrefix(); } public boolean setUserAgentPrefix(String uaPrefix){ return persistence.setUserAgentPrefix(uaPrefix); } }
package com.rainbof.nyxtools; import android.content.Context; import android.util.Log; import com.rainbof.nyxtools.util.S; public class NyxTools { private volatile static NyxTools mInstance; private NyxToolsPersistence persistence; public static NyxTools getInstance(Context _context) { if (mInstance == null) { synchronized (NyxTools.class) { mInstance = new NyxTools(_context); } } return mInstance; } protected NyxTools(Context _context) { try { if (_context.getApplicationContext() != null) _context = _context.getApplicationContext(); } catch (Exception e) { Log.e(S.TAG + "getApplicationContext", "You must call getInstance() after super.onCreate(...)"); throw new IllegalStateException(); } persistence = new NyxToolsPersistence(_context); } public boolean setUsername(String username) { return persistence.setUsername(username); } public String getUsername() { return persistence.getUsername(); } public boolean setAuthToken(String authToken) { return persistence.setAuthToken(authToken); } public String getAuthToken() { return persistence.getAuthToken(); } public String getUserAgentPrefix() { return persistence.getUserAgentPrefix(); } public boolean setUserAgentPrefix(String uaPrefix) { return persistence.setUserAgentPrefix(uaPrefix); } }
Fix for calling getInstance before ApplicationContext is available
Fix for calling getInstance before ApplicationContext is available
Java
apache-2.0
rainbof/nyxtools
java
## Code Before: package com.rainbof.nyxtools; import android.content.Context; public class NyxTools { private volatile static NyxTools mInstance; private NyxToolsPersistence persistence; public static NyxTools getInstance(Context _context) { if (mInstance == null) { synchronized (NyxTools.class) { mInstance = new NyxTools(_context); } } return mInstance; } protected NyxTools(Context _context) { if (_context.getApplicationContext() != null) _context = _context.getApplicationContext(); persistence = new NyxToolsPersistence(_context); } public boolean setUsername(String username) { return persistence.setUsername(username); } public String getUsername() { return persistence.getUsername(); } public boolean setAuthToken(String authToken) { return persistence.setAuthToken(authToken); } public String getAuthToken() { return persistence.getAuthToken(); } public String getUserAgentPrefix(){ return persistence.getUserAgentPrefix(); } public boolean setUserAgentPrefix(String uaPrefix){ return persistence.setUserAgentPrefix(uaPrefix); } } ## Instruction: Fix for calling getInstance before ApplicationContext is available ## Code After: package com.rainbof.nyxtools; import android.content.Context; import android.util.Log; import com.rainbof.nyxtools.util.S; public class NyxTools { private volatile static NyxTools mInstance; private NyxToolsPersistence persistence; public static NyxTools getInstance(Context _context) { if (mInstance == null) { synchronized (NyxTools.class) { mInstance = new NyxTools(_context); } } return mInstance; } protected NyxTools(Context _context) { try { if (_context.getApplicationContext() != null) _context = _context.getApplicationContext(); } catch (Exception e) { Log.e(S.TAG + "getApplicationContext", "You must call getInstance() after super.onCreate(...)"); throw new IllegalStateException(); } persistence = new NyxToolsPersistence(_context); } public boolean setUsername(String username) { return persistence.setUsername(username); } public String getUsername() { return persistence.getUsername(); } public boolean setAuthToken(String authToken) { return persistence.setAuthToken(authToken); } public String getAuthToken() { return persistence.getAuthToken(); } public String getUserAgentPrefix() { return persistence.getUserAgentPrefix(); } public boolean setUserAgentPrefix(String uaPrefix) { return persistence.setUserAgentPrefix(uaPrefix); } }
# ... existing code ... package com.rainbof.nyxtools; import android.content.Context; import android.util.Log; import com.rainbof.nyxtools.util.S; public class NyxTools { # ... modified code ... } protected NyxTools(Context _context) { try { if (_context.getApplicationContext() != null) _context = _context.getApplicationContext(); } catch (Exception e) { Log.e(S.TAG + "getApplicationContext", "You must call getInstance() after super.onCreate(...)"); throw new IllegalStateException(); } persistence = new NyxToolsPersistence(_context); } ... public String getAuthToken() { return persistence.getAuthToken(); } public String getUserAgentPrefix() { return persistence.getUserAgentPrefix(); } public boolean setUserAgentPrefix(String uaPrefix) { return persistence.setUserAgentPrefix(uaPrefix); } # ... rest of the code ...
b3b67fe0e68423fc2f85bccf1f20acdb779a38ba
pylxd/deprecated/tests/utils.py
pylxd/deprecated/tests/utils.py
from pylxd import api from pylxd import exceptions as lxd_exceptions def upload_image(image): alias = "{}/{}/{}/{}".format( image["os"], image["release"], image["arch"], image["variant"] ) lxd = api.API() imgs = api.API(host="images.linuxcontainers.org") d = imgs.alias_show(alias) meta = d[1]["metadata"] tgt = meta["target"] try: lxd.alias_update(meta) except lxd_exceptions.APIError as ex: if ex.status_code == 404: lxd.alias_create(meta) return tgt def delete_image(image): lxd = api.API() lxd.image_delete(image)
from pylxd import api def delete_image(image): lxd = api.API() lxd.image_delete(image)
Remove unused testing utility function
Remove unused testing utility function Signed-off-by: Dougal Matthews <[email protected]>
Python
apache-2.0
lxc/pylxd,lxc/pylxd
python
## Code Before: from pylxd import api from pylxd import exceptions as lxd_exceptions def upload_image(image): alias = "{}/{}/{}/{}".format( image["os"], image["release"], image["arch"], image["variant"] ) lxd = api.API() imgs = api.API(host="images.linuxcontainers.org") d = imgs.alias_show(alias) meta = d[1]["metadata"] tgt = meta["target"] try: lxd.alias_update(meta) except lxd_exceptions.APIError as ex: if ex.status_code == 404: lxd.alias_create(meta) return tgt def delete_image(image): lxd = api.API() lxd.image_delete(image) ## Instruction: Remove unused testing utility function Signed-off-by: Dougal Matthews <[email protected]> ## Code After: from pylxd import api def delete_image(image): lxd = api.API() lxd.image_delete(image)
# ... existing code ... from pylxd import api def delete_image(image): # ... rest of the code ...
1ddf35ebc48f1a0922c17399382ce679a51b2bda
src/main.h
src/main.h
// Libraries // #include "craftable.h" #include "resource.h" #include "villager.h" #include "location.h" // Forward Declarations // enum LOCATION adr_loc; enum FIRE_STATE adr_fire; enum ROOM_TEMP adr_temp; unsigned int adr_rs [ALIEN_ALLOY + 1]; unsigned short adr_cs [RIFLE + 1]; unsigned short adr_vs [MUNITIONIST + 1]; #endif // __ADR_MAIN_H__ // vim: set ts=4 sw=4 et:
// Libraries // #include "craftable.h" #include "resource.h" #include "villager.h" #include "location.h" // Forward Declarations // static enum LOCATION adr_loc; static enum FIRE_STATE adr_fire; static enum ROOM_TEMP adr_temp; static unsigned int adr_rs [ALIEN_ALLOY + 1]; static unsigned short adr_cs [RIFLE + 1]; static unsigned short adr_vs [MUNITIONIST + 1]; #endif // __ADR_MAIN_H__ // vim: set ts=4 sw=4 et:
Make all the state variables static (still subject to significant change)
Make all the state variables static (still subject to significant change)
C
mpl-2.0
HalosGhost/adarcroom
c
## Code Before: // Libraries // #include "craftable.h" #include "resource.h" #include "villager.h" #include "location.h" // Forward Declarations // enum LOCATION adr_loc; enum FIRE_STATE adr_fire; enum ROOM_TEMP adr_temp; unsigned int adr_rs [ALIEN_ALLOY + 1]; unsigned short adr_cs [RIFLE + 1]; unsigned short adr_vs [MUNITIONIST + 1]; #endif // __ADR_MAIN_H__ // vim: set ts=4 sw=4 et: ## Instruction: Make all the state variables static (still subject to significant change) ## Code After: // Libraries // #include "craftable.h" #include "resource.h" #include "villager.h" #include "location.h" // Forward Declarations // static enum LOCATION adr_loc; static enum FIRE_STATE adr_fire; static enum ROOM_TEMP adr_temp; static unsigned int adr_rs [ALIEN_ALLOY + 1]; static unsigned short adr_cs [RIFLE + 1]; static unsigned short adr_vs [MUNITIONIST + 1]; #endif // __ADR_MAIN_H__ // vim: set ts=4 sw=4 et:
# ... existing code ... #include "location.h" // Forward Declarations // static enum LOCATION adr_loc; static enum FIRE_STATE adr_fire; static enum ROOM_TEMP adr_temp; static unsigned int adr_rs [ALIEN_ALLOY + 1]; static unsigned short adr_cs [RIFLE + 1]; static unsigned short adr_vs [MUNITIONIST + 1]; #endif // __ADR_MAIN_H__ // vim: set ts=4 sw=4 et: # ... rest of the code ...
dd11bcd4011ba911642b2e13d0db2440f749afa1
setup.py
setup.py
try: from setuptools import setup except ImportError: from distutils.core import setup Version = "0.01" setup(name = "coverage-reporter", version = Version, description = "Coverage reporting tool", long_description="Allows more complicated reporting of information from figleaf and other coverage tools", author = "David Christian", author_email = "[email protected]", url = "http://github.org/dugan/coverage-reporter/", packages = [ 'coverage_reporter', 'coverage_reporter.filters', 'coverage_reporter.collectors', 'coverage_reporter.reports' ], license = 'BSD', scripts = ['scripts/coverage-reporter'], platforms = 'Posix; MacOS X; Windows', classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Topic :: Development', ], )
try: from setuptools import setup except ImportError: from distutils.core import setup Version = "0.01" setup(name = "coverage-reporter", version = Version, description = "Coverage reporting tool", long_description="Allows more complicated reporting of information from figleaf and other coverage tools", author = "David Christian", author_email = "[email protected]", url = "http://github.org/dugan/coverage-reporter/", packages = [ 'coverage_reporter', 'coverage_reporter.filters', 'coverage_reporter.collectors', 'coverage_reporter.reports' ], license = 'MIT', scripts = ['scripts/coverage-reporter'], platforms = 'Posix; MacOS X; Windows', classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', ], )
Mark this project as using the MIT License.
Mark this project as using the MIT License.
Python
mit
dugan/coverage-reporter
python
## Code Before: try: from setuptools import setup except ImportError: from distutils.core import setup Version = "0.01" setup(name = "coverage-reporter", version = Version, description = "Coverage reporting tool", long_description="Allows more complicated reporting of information from figleaf and other coverage tools", author = "David Christian", author_email = "[email protected]", url = "http://github.org/dugan/coverage-reporter/", packages = [ 'coverage_reporter', 'coverage_reporter.filters', 'coverage_reporter.collectors', 'coverage_reporter.reports' ], license = 'BSD', scripts = ['scripts/coverage-reporter'], platforms = 'Posix; MacOS X; Windows', classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Topic :: Development', ], ) ## Instruction: Mark this project as using the MIT License. ## Code After: try: from setuptools import setup except ImportError: from distutils.core import setup Version = "0.01" setup(name = "coverage-reporter", version = Version, description = "Coverage reporting tool", long_description="Allows more complicated reporting of information from figleaf and other coverage tools", author = "David Christian", author_email = "[email protected]", url = "http://github.org/dugan/coverage-reporter/", packages = [ 'coverage_reporter', 'coverage_reporter.filters', 'coverage_reporter.collectors', 'coverage_reporter.reports' ], license = 'MIT', scripts = ['scripts/coverage-reporter'], platforms = 'Posix; MacOS X; Windows', classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', ], )
... author_email = "[email protected]", url = "http://github.org/dugan/coverage-reporter/", packages = [ 'coverage_reporter', 'coverage_reporter.filters', 'coverage_reporter.collectors', 'coverage_reporter.reports' ], license = 'MIT', scripts = ['scripts/coverage-reporter'], platforms = 'Posix; MacOS X; Windows', classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', ], ) ...
924a28fdc53a661053abb77688fa5bbc8078f338
musicianlibrary-common/src/main/java/io/musician101/musicianlibrary/java/minecraft/gui/chest/ChestGUI.java
musicianlibrary-common/src/main/java/io/musician101/musicianlibrary/java/minecraft/gui/chest/ChestGUI.java
package io.musician101.musicianlibrary.java.minecraft.gui.chest; import java.util.List; public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> { protected final List<GUIButton<C, G, I, J, P, S>> buttons; protected final I inventory; protected final int page; protected final P player; protected final J plugin; protected final G prevGUI; protected ChestGUI(I inventory, P player, int page, List<GUIButton<C, G, I, J, P, S>> buttons, G prevGUI, J plugin, boolean manualOpen) { this.inventory = inventory; this.player = player; this.page = page; this.buttons = buttons; this.prevGUI = prevGUI; this.plugin = plugin; if (!manualOpen) { open(); } } public abstract void close(); public abstract void open(); }
package io.musician101.musicianlibrary.java.minecraft.gui.chest; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> { @Nonnull protected final List<GUIButton<C, G, I, J, P, S>> buttons; @Nonnull protected final I inventory; protected final int page; @Nonnull protected final P player; @Nonnull protected final J plugin; @Nullable protected final G prevGUI; protected ChestGUI(@Nonnull I inventory, @Nonnull P player, int page, @Nonnull List<GUIButton<C, G, I, J, P, S>> buttons, @Nullable G prevGUI, @Nonnull J plugin, boolean manualOpen) { this.inventory = inventory; this.player = player; this.page = page; this.buttons = buttons; this.prevGUI = prevGUI; this.plugin = plugin; if (!manualOpen) { open(); } } public abstract void close(); @Nullable public G getPreviousGUI() { return prevGUI; } public abstract void open(); }
Add a method to get the previous GUI.
Add a method to get the previous GUI.
Java
mit
Musician101/Common
java
## Code Before: package io.musician101.musicianlibrary.java.minecraft.gui.chest; import java.util.List; public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> { protected final List<GUIButton<C, G, I, J, P, S>> buttons; protected final I inventory; protected final int page; protected final P player; protected final J plugin; protected final G prevGUI; protected ChestGUI(I inventory, P player, int page, List<GUIButton<C, G, I, J, P, S>> buttons, G prevGUI, J plugin, boolean manualOpen) { this.inventory = inventory; this.player = player; this.page = page; this.buttons = buttons; this.prevGUI = prevGUI; this.plugin = plugin; if (!manualOpen) { open(); } } public abstract void close(); public abstract void open(); } ## Instruction: Add a method to get the previous GUI. ## Code After: package io.musician101.musicianlibrary.java.minecraft.gui.chest; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> { @Nonnull protected final List<GUIButton<C, G, I, J, P, S>> buttons; @Nonnull protected final I inventory; protected final int page; @Nonnull protected final P player; @Nonnull protected final J plugin; @Nullable protected final G prevGUI; protected ChestGUI(@Nonnull I inventory, @Nonnull P player, int page, @Nonnull List<GUIButton<C, G, I, J, P, S>> buttons, @Nullable G prevGUI, @Nonnull J plugin, boolean manualOpen) { this.inventory = inventory; this.player = player; this.page = page; this.buttons = buttons; this.prevGUI = prevGUI; this.plugin = plugin; if (!manualOpen) { open(); } } public abstract void close(); @Nullable public G getPreviousGUI() { return prevGUI; } public abstract void open(); }
# ... existing code ... package io.musician101.musicianlibrary.java.minecraft.gui.chest; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; public abstract class ChestGUI<C, G extends ChestGUI<C, G, I, J, P, S>, I, J, P, S> { @Nonnull protected final List<GUIButton<C, G, I, J, P, S>> buttons; @Nonnull protected final I inventory; protected final int page; @Nonnull protected final P player; @Nonnull protected final J plugin; @Nullable protected final G prevGUI; protected ChestGUI(@Nonnull I inventory, @Nonnull P player, int page, @Nonnull List<GUIButton<C, G, I, J, P, S>> buttons, @Nullable G prevGUI, @Nonnull J plugin, boolean manualOpen) { this.inventory = inventory; this.player = player; this.page = page; # ... modified code ... public abstract void close(); @Nullable public G getPreviousGUI() { return prevGUI; } public abstract void open(); } # ... rest of the code ...
131fb74b0f399ad3abff5dcc2b09621cac1226e7
config/nox_routing.py
config/nox_routing.py
from experiment_config_lib import ControllerConfig from sts.control_flow import Fuzzer from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use NOX as our controller command_line = "./nox_core -i ptcp:6633 routing" controllers = [ControllerConfig(command_line, cwd="nox_classic/build/src", address="127.0.0.1", port=6633)] dataplane_trace = "dataplane_traces/ping_pong_fat_tree.trace" simulation_config = SimulationConfig(controller_configs=controllers, dataplane_trace=dataplane_trace) # Use a Fuzzer (already the default) control_flow = Fuzzer(simulation_config, input_logger=InputLogger(), check_interval=80, invariant_check=InvariantChecker.check_connectivity)
from experiment_config_lib import ControllerConfig from sts.control_flow import Fuzzer from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig from sts.topology import MeshTopology # Use NOX as our controller command_line = "./nox_core -v -i ptcp:6633 sample_routing" controllers = [ControllerConfig(command_line, cwd="nox_classic/build/src", address="127.0.0.1", port=6633)] topology_class = MeshTopology topology_params = "num_switches=4" dataplane_trace = "dataplane_traces/ping_pong_same_subnet_4_switches.trace" # dataplane_trace = "dataplane_traces/ping_pong_fat_tree.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace) #simulation_config = SimulationConfig(controller_configs=controllers, # dataplane_trace=dataplane_trace) # Use a Fuzzer (already the default) control_flow = Fuzzer(simulation_config, input_logger=InputLogger(), check_interval=80, invariant_check=InvariantChecker.check_connectivity)
Update NOX config to use sample_routing
Update NOX config to use sample_routing
Python
apache-2.0
ucb-sts/sts,jmiserez/sts,jmiserez/sts,ucb-sts/sts
python
## Code Before: from experiment_config_lib import ControllerConfig from sts.control_flow import Fuzzer from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig # Use NOX as our controller command_line = "./nox_core -i ptcp:6633 routing" controllers = [ControllerConfig(command_line, cwd="nox_classic/build/src", address="127.0.0.1", port=6633)] dataplane_trace = "dataplane_traces/ping_pong_fat_tree.trace" simulation_config = SimulationConfig(controller_configs=controllers, dataplane_trace=dataplane_trace) # Use a Fuzzer (already the default) control_flow = Fuzzer(simulation_config, input_logger=InputLogger(), check_interval=80, invariant_check=InvariantChecker.check_connectivity) ## Instruction: Update NOX config to use sample_routing ## Code After: from experiment_config_lib import ControllerConfig from sts.control_flow import Fuzzer from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig from sts.topology import MeshTopology # Use NOX as our controller command_line = "./nox_core -v -i ptcp:6633 sample_routing" controllers = [ControllerConfig(command_line, cwd="nox_classic/build/src", address="127.0.0.1", port=6633)] topology_class = MeshTopology topology_params = "num_switches=4" dataplane_trace = "dataplane_traces/ping_pong_same_subnet_4_switches.trace" # dataplane_trace = "dataplane_traces/ping_pong_fat_tree.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace) #simulation_config = SimulationConfig(controller_configs=controllers, # dataplane_trace=dataplane_trace) # Use a Fuzzer (already the default) control_flow = Fuzzer(simulation_config, input_logger=InputLogger(), check_interval=80, invariant_check=InvariantChecker.check_connectivity)
... from sts.input_traces.input_logger import InputLogger from sts.invariant_checker import InvariantChecker from sts.simulation_state import SimulationConfig from sts.topology import MeshTopology # Use NOX as our controller command_line = "./nox_core -v -i ptcp:6633 sample_routing" controllers = [ControllerConfig(command_line, cwd="nox_classic/build/src", address="127.0.0.1", port=6633)] topology_class = MeshTopology topology_params = "num_switches=4" dataplane_trace = "dataplane_traces/ping_pong_same_subnet_4_switches.trace" # dataplane_trace = "dataplane_traces/ping_pong_fat_tree.trace" simulation_config = SimulationConfig(controller_configs=controllers, topology_class=topology_class, topology_params=topology_params, dataplane_trace=dataplane_trace) #simulation_config = SimulationConfig(controller_configs=controllers, # dataplane_trace=dataplane_trace) # Use a Fuzzer (already the default) control_flow = Fuzzer(simulation_config, input_logger=InputLogger(), ...
ee61414cb53dd883d9f5ab60b0148bf0ed9bf3d7
us_ignite/people/tests/integration_tests.py
us_ignite/people/tests/integration_tests.py
from nose.tools import eq_ from django.contrib.auth.models import User from django.test import TestCase from django_nose.tools import assert_redirects from us_ignite.common.tests import utils from us_ignite.profiles.tests import fixtures def _teardown_profiles(): for model in [User]: model.objects.all().delete() class TestPeopleListUnauthenticated(TestCase): def test_people_list_requires_auth(self): url = '/people/' response = self.client.get(url) assert_redirects(response, utils.get_login_url(url)) def test_people_list_is_successful(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') self.client.login(username='us-ignite', password='us-ignite') response = self.client.get('/people/') eq_(response.status_code, 200) self.client.logout() _teardown_profiles() class TestPeopleListPage(TestCase): def test_people_page_detail_is_successful(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') self.client.login(username='us-ignite', password='us-ignite') response = self.client.get('/people/ignite/') eq_(response.status_code, 200) self.client.logout() _teardown_profiles() def test_people_page_detail_requires_auth(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') response = self.client.get('/people/ignite/') assert_redirects(response, utils.get_login_url('/people/ignite/')) _teardown_profiles()
from nose.tools import eq_ from django.contrib.auth.models import User from django.test import TestCase from django_nose.tools import assert_redirects from us_ignite.common.tests import utils from us_ignite.profiles.tests import fixtures def _teardown_profiles(): for model in [User]: model.objects.all().delete() class TestPeopleDetailPage(TestCase): def test_people_page_detail_is_successful(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') self.client.login(username='us-ignite', password='us-ignite') response = self.client.get('/people/ignite/') eq_(response.status_code, 200) self.client.logout() _teardown_profiles() def test_people_page_detail_requires_auth(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') response = self.client.get('/people/ignite/') assert_redirects(response, utils.get_login_url('/people/ignite/')) _teardown_profiles()
Update tests to reflect the removal of the users list view.
Update tests to reflect the removal of the users list view.
Python
bsd-3-clause
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
python
## Code Before: from nose.tools import eq_ from django.contrib.auth.models import User from django.test import TestCase from django_nose.tools import assert_redirects from us_ignite.common.tests import utils from us_ignite.profiles.tests import fixtures def _teardown_profiles(): for model in [User]: model.objects.all().delete() class TestPeopleListUnauthenticated(TestCase): def test_people_list_requires_auth(self): url = '/people/' response = self.client.get(url) assert_redirects(response, utils.get_login_url(url)) def test_people_list_is_successful(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') self.client.login(username='us-ignite', password='us-ignite') response = self.client.get('/people/') eq_(response.status_code, 200) self.client.logout() _teardown_profiles() class TestPeopleListPage(TestCase): def test_people_page_detail_is_successful(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') self.client.login(username='us-ignite', password='us-ignite') response = self.client.get('/people/ignite/') eq_(response.status_code, 200) self.client.logout() _teardown_profiles() def test_people_page_detail_requires_auth(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') response = self.client.get('/people/ignite/') assert_redirects(response, utils.get_login_url('/people/ignite/')) _teardown_profiles() ## Instruction: Update tests to refelct the removal of the users list view. ## Code After: from nose.tools import eq_ from django.contrib.auth.models import User from django.test import TestCase from django_nose.tools import assert_redirects from us_ignite.common.tests import utils from us_ignite.profiles.tests import fixtures def _teardown_profiles(): for model in [User]: model.objects.all().delete() class TestPeopleDetailPage(TestCase): def test_people_page_detail_is_successful(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') self.client.login(username='us-ignite', password='us-ignite') response = self.client.get('/people/ignite/') eq_(response.status_code, 200) self.client.logout() _teardown_profiles() def test_people_page_detail_requires_auth(self): user = fixtures.get_user('us-ignite') fixtures.get_profile(user=user, name='us ignite', slug='ignite') response = self.client.get('/people/ignite/') assert_redirects(response, utils.get_login_url('/people/ignite/')) _teardown_profiles()
# ... existing code ... model.objects.all().delete() class TestPeopleDetailPage(TestCase): def test_people_page_detail_is_successful(self): user = fixtures.get_user('us-ignite') # ... rest of the code ...
b66d8c2d43a28ce6e0824543bd879dc3528e3509
rest/available-phone-numbers/local-basic-example-1/local-get-basic-example-1.6.x.py
rest/available-phone-numbers/local-basic-example-1/local-get-basic-example-1.6.x.py
from twilio.rest import Client # Your Account Sid and Auth Token from twilio.com/user/account account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" auth_token = "your_auth_token" client = Client(account_sid, auth_token) numbers = client.available_phone_numbers("US") \ .local \ .list(area_code="510") number = client.incoming_phone_numbers \ .create(phone_number=numbers[0].phone_number) print(number.sid)
from twilio.rest import Client # Your Account Sid and Auth Token from twilio.com/user/account account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" auth_token = "your_auth_token" client = Client(account_sid, auth_token) numbers = client.available_phone_numbers("US") \ .local \ .list(area_code="510") # Purchase the phone number number = client.incoming_phone_numbers \ .create(phone_number=numbers[0].phone_number) print(number.sid)
Add a comment about purchasing the phone number
Add a comment about purchasing the phone number
Python
mit
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets
python
## Code Before: from twilio.rest import Client # Your Account Sid and Auth Token from twilio.com/user/account account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" auth_token = "your_auth_token" client = Client(account_sid, auth_token) numbers = client.available_phone_numbers("US") \ .local \ .list(area_code="510") number = client.incoming_phone_numbers \ .create(phone_number=numbers[0].phone_number) print(number.sid) ## Instruction: Add a comment about purchasing the phone number ## Code After: from twilio.rest import Client # Your Account Sid and Auth Token from twilio.com/user/account account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" auth_token = "your_auth_token" client = Client(account_sid, auth_token) numbers = client.available_phone_numbers("US") \ .local \ .list(area_code="510") # Purchase the phone number number = client.incoming_phone_numbers \ .create(phone_number=numbers[0].phone_number) print(number.sid)
... .local \ .list(area_code="510") # Purchase the phone number number = client.incoming_phone_numbers \ .create(phone_number=numbers[0].phone_number) ...
2721a3d5c8bfcf3a6945e8744e4887688578ce9f
tests/test_emailharvesterws.py
tests/test_emailharvesterws.py
import pytest from botanick import Botanick def test_botanick(): emails_found = Botanick.search("squad.pro") assert emails_found != "" print(emails_found)
from botanick import Botanick def test_botanick(): emails_found = Botanick.search("squad.pro") assert emails_found != ""
Revert "Revert "Fix a codacy issue""
Revert "Revert "Fix a codacy issue"" This reverts commit 6551c882745b13d5b9be183e83f379e34b067921.
Python
mit
avidot/Botanick
python
## Code Before: import pytest from botanick import Botanick def test_botanick(): emails_found = Botanick.search("squad.pro") assert emails_found != "" print(emails_found) ## Instruction: Revert "Revert "Fix a codacy issue"" This reverts commit 6551c882745b13d5b9be183e83f379e34b067921. ## Code After: from botanick import Botanick def test_botanick(): emails_found = Botanick.search("squad.pro") assert emails_found != ""
// ... existing code ... from botanick import Botanick // ... modified code ... def test_botanick(): emails_found = Botanick.search("squad.pro") assert emails_found != "" // ... rest of the code ...
24ca48098777d89835cf169ee2b4f06db50ec9f1
koans/triangle.py
koans/triangle.py
def triangle(a, b, c): if (a <= 0 or b <= 0 and c <= 0): raise TriangleError() if (a == b and b == c and c == a): return 'equilateral' elif (a == b or b == c or c == a): return 'isosceles' elif (a != b and b != c and c != a): return 'scalene' # Error class used in part 2. No need to change this code. class TriangleError(Exception): pass
def triangle(a, b, c): if (a <= 0 or b <= 0 and c <= 0): raise TriangleError() if (a == b and b == c): return 'equilateral' elif (a == b or b == c or c == a): return 'isosceles' else: return 'scalene' # Error class used in part 2. No need to change this code. class TriangleError(Exception): pass
Simplify logic conditionals as tests still pass.
Simplify logic conditionals as tests still pass.
Python
mit
javierjulio/python-koans-completed,javierjulio/python-koans-completed
python
## Code Before: def triangle(a, b, c): if (a <= 0 or b <= 0 and c <= 0): raise TriangleError() if (a == b and b == c and c == a): return 'equilateral' elif (a == b or b == c or c == a): return 'isosceles' elif (a != b and b != c and c != a): return 'scalene' # Error class used in part 2. No need to change this code. class TriangleError(Exception): pass ## Instruction: Simplify logic conditionals as tests still pass. ## Code After: def triangle(a, b, c): if (a <= 0 or b <= 0 and c <= 0): raise TriangleError() if (a == b and b == c): return 'equilateral' elif (a == b or b == c or c == a): return 'isosceles' else: return 'scalene' # Error class used in part 2. No need to change this code. class TriangleError(Exception): pass
... if (a <= 0 or b <= 0 and c <= 0): raise TriangleError() if (a == b and b == c): return 'equilateral' elif (a == b or b == c or c == a): return 'isosceles' else: return 'scalene' # Error class used in part 2. No need to change this code. ...
ccd5dce9b4e2392abdf205d2913736a6ce47ae0b
setup.py
setup.py
from setuptools import setup, find_packages setup( name="dj-redis-url", version="0.1.2", description="Use Redis URLs in your Django Application.", long_description=__doc__, url="https://github.com/dstufft/dj-redis-url", author="Donald Stufft", author_email="[email protected]", extras_require={ "tests": ["pytest"], }, packages=find_packages(exclude=["tests"]), package_data={"": ["LICENSE"]}, include_package_data=True, zip_safe=False, )
from setuptools import setup setup( name="dj-redis-url", version="0.1.2", description="Use Redis URLs in your Django Application.", long_description=__doc__, url="https://github.com/dstufft/dj-redis-url", author="Donald Stufft", author_email="[email protected]", extras_require={ "tests": ["pytest"], }, py_modules=["dj_redis_url"], include_package_data=True, zip_safe=False, )
Switch from packages to modules
Switch from packages to modules
Python
bsd-2-clause
dstufft/dj-redis-url
python
## Code Before: from setuptools import setup, find_packages setup( name="dj-redis-url", version="0.1.2", description="Use Redis URLs in your Django Application.", long_description=__doc__, url="https://github.com/dstufft/dj-redis-url", author="Donald Stufft", author_email="[email protected]", extras_require={ "tests": ["pytest"], }, packages=find_packages(exclude=["tests"]), package_data={"": ["LICENSE"]}, include_package_data=True, zip_safe=False, ) ## Instruction: Switch from packages to modules ## Code After: from setuptools import setup setup( name="dj-redis-url", version="0.1.2", description="Use Redis URLs in your Django Application.", long_description=__doc__, url="https://github.com/dstufft/dj-redis-url", author="Donald Stufft", author_email="[email protected]", extras_require={ "tests": ["pytest"], }, py_modules=["dj_redis_url"], include_package_data=True, zip_safe=False, )
... from setuptools import setup setup( name="dj-redis-url", ... "tests": ["pytest"], }, py_modules=["dj_redis_url"], include_package_data=True, zip_safe=False, ...
68f50e83f4b06d3e45bfe1610d50d88e73bde8af
examples/load_table_from_url.py
examples/load_table_from_url.py
from __future__ import print_function from __future__ import unicode_literals import pytablereader print("\n".join([ "load from URL", "==============", ])) loader = pytablereader.TableUrlLoader( "https://en.wikipedia.org/wiki/List_of_unit_testing_frameworks", "html") with open("hoge.rst", "w", encoding="utf-8") as f: for table_data in loader.load(): print("{:s}".format(table_data.dumps())) f.write(table_data.dumps())
from __future__ import print_function from __future__ import unicode_literals import io import pytablereader print("\n".join([ "load from URL", "==============", ])) loader = pytablereader.TableUrlLoader( "https://en.wikipedia.org/wiki/List_of_unit_testing_frameworks", "html") with io.open("hoge.rst", "w", encoding=loader.encoding) as f: for table_data in loader.load(): print("{:s}".format(table_data.dumps())) f.write(table_data.dumps())
Fix for python 2 compatibility
Fix for python 2 compatibility
Python
mit
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
python
## Code Before: from __future__ import print_function from __future__ import unicode_literals import pytablereader print("\n".join([ "load from URL", "==============", ])) loader = pytablereader.TableUrlLoader( "https://en.wikipedia.org/wiki/List_of_unit_testing_frameworks", "html") with open("hoge.rst", "w", encoding="utf-8") as f: for table_data in loader.load(): print("{:s}".format(table_data.dumps())) f.write(table_data.dumps()) ## Instruction: Fix for python 2 compatibility ## Code After: from __future__ import print_function from __future__ import unicode_literals import io import pytablereader print("\n".join([ "load from URL", "==============", ])) loader = pytablereader.TableUrlLoader( "https://en.wikipedia.org/wiki/List_of_unit_testing_frameworks", "html") with io.open("hoge.rst", "w", encoding=loader.encoding) as f: for table_data in loader.load(): print("{:s}".format(table_data.dumps())) f.write(table_data.dumps())
// ... existing code ... from __future__ import print_function from __future__ import unicode_literals import io import pytablereader print("\n".join([ // ... modified code ... "https://en.wikipedia.org/wiki/List_of_unit_testing_frameworks", "html") with io.open("hoge.rst", "w", encoding=loader.encoding) as f: for table_data in loader.load(): print("{:s}".format(table_data.dumps())) f.write(table_data.dumps()) // ... rest of the code ...
c713273fe145418113d750579f8b135dc513c3b8
config.py
config.py
import os if os.environ.get('DATABASE_URL') is None: SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db' else: SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
import os SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
Delete default case for SQLALCHEMY_DATABASE_URI
Delete default case for SQLALCHEMY_DATABASE_URI if the user doesn't set it, they could have some problems with SQLite
Python
mit
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
python
## Code Before: import os if os.environ.get('DATABASE_URL') is None: SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db' else: SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning ## Instruction: Delete default case for SQLALCHEMY_DATABASE_URI if user doesn't set it, he coud have some problems with SQLite ## Code After: import os SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
// ... existing code ... import os SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning // ... rest of the code ...
e87e136dd590134b7be6f5d04aebeed719880c9e
paasta_tools/paasta_native_serviceinit.py
paasta_tools/paasta_native_serviceinit.py
from __future__ import absolute_import from __future__ import unicode_literals from paasta_tools import native_mesos_scheduler from paasta_tools.mesos_tools import status_mesos_tasks_verbose from paasta_tools.utils import calculate_tail_lines from paasta_tools.utils import compose_job_id from paasta_tools.utils import paasta_print def perform_command(command, service, instance, cluster, verbose, soa_dir): if verbose > 0: tail_lines = calculate_tail_lines(verbose_level=verbose) else: tail_lines = 0 # We have to add a spacer at the end to make sure we only return # things for service.main and not service.main_foo task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER) if command == 'status': paasta_print(status_mesos_tasks_verbose( job_id=task_id_prefix, get_short_task_id=lambda x: x, tail_lines=tail_lines, ))
from __future__ import absolute_import from __future__ import unicode_literals from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER from paasta_tools.mesos_tools import status_mesos_tasks_verbose from paasta_tools.utils import calculate_tail_lines from paasta_tools.utils import compose_job_id from paasta_tools.utils import paasta_print def perform_command(command, service, instance, cluster, verbose, soa_dir): if verbose > 0: tail_lines = calculate_tail_lines(verbose_level=verbose) else: tail_lines = 0 # We have to add a spacer at the end to make sure we only return # things for service.main and not service.main_foo task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER) if command == 'status': paasta_print(status_mesos_tasks_verbose( job_id=task_id_prefix, get_short_task_id=lambda x: x, tail_lines=tail_lines, ))
Fix broken import in native scheduler
Fix broken import in native scheduler
Python
apache-2.0
Yelp/paasta,somic/paasta,Yelp/paasta,somic/paasta
python
## Code Before: from __future__ import absolute_import from __future__ import unicode_literals from paasta_tools import native_mesos_scheduler from paasta_tools.mesos_tools import status_mesos_tasks_verbose from paasta_tools.utils import calculate_tail_lines from paasta_tools.utils import compose_job_id from paasta_tools.utils import paasta_print def perform_command(command, service, instance, cluster, verbose, soa_dir): if verbose > 0: tail_lines = calculate_tail_lines(verbose_level=verbose) else: tail_lines = 0 # We have to add a spacer at the end to make sure we only return # things for service.main and not service.main_foo task_id_prefix = "%s%s" % (compose_job_id(service, instance), native_mesos_scheduler.MESOS_TASK_SPACER) if command == 'status': paasta_print(status_mesos_tasks_verbose( job_id=task_id_prefix, get_short_task_id=lambda x: x, tail_lines=tail_lines, )) ## Instruction: Fix broken import in native scheduler ## Code After: from __future__ import absolute_import from __future__ import unicode_literals from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER from paasta_tools.mesos_tools import status_mesos_tasks_verbose from paasta_tools.utils import calculate_tail_lines from paasta_tools.utils import compose_job_id from paasta_tools.utils import paasta_print def perform_command(command, service, instance, cluster, verbose, soa_dir): if verbose > 0: tail_lines = calculate_tail_lines(verbose_level=verbose) else: tail_lines = 0 # We have to add a spacer at the end to make sure we only return # things for service.main and not service.main_foo task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER) if command == 'status': paasta_print(status_mesos_tasks_verbose( job_id=task_id_prefix, get_short_task_id=lambda x: x, tail_lines=tail_lines, ))
// ... existing code ... from __future__ import absolute_import from __future__ import unicode_literals from paasta_tools.frameworks.native_scheduler import MESOS_TASK_SPACER from paasta_tools.mesos_tools import status_mesos_tasks_verbose from paasta_tools.utils import calculate_tail_lines from paasta_tools.utils import compose_job_id // ... modified code ... # We have to add a spacer at the end to make sure we only return # things for service.main and not service.main_foo task_id_prefix = "%s%s" % (compose_job_id(service, instance), MESOS_TASK_SPACER) if command == 'status': paasta_print(status_mesos_tasks_verbose( // ... rest of the code ...
3f1e0843e3afe5c98a786d6fc51af8d050e20838
annotation-file-utilities/src/annotator/find/CloseParenthesisInsertion.java
annotation-file-utilities/src/annotator/find/CloseParenthesisInsertion.java
package annotator.find; /** * This insertion adds two closing parentheses to close the unclosed parentheses * left by a {@link CastInsertion}. This should be inserted after the expression * that's being casted. */ public class CloseParenthesisInsertion extends Insertion { public CloseParenthesisInsertion(Criteria criteria, boolean separateLine) { super("))", criteria, separateLine); } /** {@inheritDoc} */ @Override public String getText(boolean comments, boolean abbreviate) { return super.getText(false, false); } }
package annotator.find; /** * This insertion adds two closing parentheses to close the unclosed parentheses * left by a {@link CastInsertion}. This should be inserted after the expression * that's being casted. */ public class CloseParenthesisInsertion extends Insertion { public CloseParenthesisInsertion(Criteria criteria, boolean separateLine) { super("", criteria, separateLine); } /** {@inheritDoc} */ @Override public String getText(boolean comments, boolean abbreviate) { return "))"; } }
Fix bug in close parenthesis insertion.
Fix bug in close parenthesis insertion.
Java
mit
eisop/annotation-tools,typetools/annotation-tools,eisop/annotation-tools,eisop/annotation-tools,typetools/annotation-tools,typetools/annotation-tools
java
## Code Before: package annotator.find; /** * This insertion adds two closing parentheses to close the unclosed parentheses * left by a {@link CastInsertion}. This should be inserted after the expression * that's being casted. */ public class CloseParenthesisInsertion extends Insertion { public CloseParenthesisInsertion(Criteria criteria, boolean separateLine) { super("))", criteria, separateLine); } /** {@inheritDoc} */ @Override public String getText(boolean comments, boolean abbreviate) { return super.getText(false, false); } } ## Instruction: Fix bug in close parenthesis insertion. ## Code After: package annotator.find; /** * This insertion adds two closing parentheses to close the unclosed parentheses * left by a {@link CastInsertion}. This should be inserted after the expression * that's being casted. */ public class CloseParenthesisInsertion extends Insertion { public CloseParenthesisInsertion(Criteria criteria, boolean separateLine) { super("", criteria, separateLine); } /** {@inheritDoc} */ @Override public String getText(boolean comments, boolean abbreviate) { return "))"; } }
# ... existing code ... public CloseParenthesisInsertion(Criteria criteria, boolean separateLine) { super("", criteria, separateLine); } /** {@inheritDoc} */ @Override public String getText(boolean comments, boolean abbreviate) { return "))"; } } # ... rest of the code ...
0eb7e39c726ced0e802de925c7ce3b3ec35c61d9
src/billing/factories.py
src/billing/factories.py
import factory import random from billing.models import Billing, OrderBilling from member.factories import ClientFactory from order.factories import OrderFactory class BillingFactory(factory.DjangoModelFactory): class Meta: model = Billing client = factory.SubFactory(ClientFactory) total_amount = random.randrange(1, stop=75, step=1) billing_month = random.randrange(1, stop=12, step=1) billing_year = random.randrange(2016, stop=2020, step=1) detail = {"123": 123} class BillingOrder(factory.DjangoModelFactory): billing_id = BillingFactory().id order_id = OrderFactory()
import factory import random from billing.models import Billing, OrderBilling from member.factories import ClientFactory from order.factories import OrderFactory class BillingFactory(factory.DjangoModelFactory): class Meta: model = Billing client = factory.SubFactory(ClientFactory) total_amount = random.randrange(1, stop=75, step=1) billing_month = random.randrange(1, stop=12, step=1) billing_year = random.randrange(2016, stop=2020, step=1) detail = {"123": 123}
Remove a BillingOrder factory class that wasn't used
Remove a BillingOrder factory class that wasn't used There was a problem with this class... but since I couldn't find code using it, I simply deleted it.
Python
agpl-3.0
savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/sous-chef
python
## Code Before: import factory import random from billing.models import Billing, OrderBilling from member.factories import ClientFactory from order.factories import OrderFactory class BillingFactory(factory.DjangoModelFactory): class Meta: model = Billing client = factory.SubFactory(ClientFactory) total_amount = random.randrange(1, stop=75, step=1) billing_month = random.randrange(1, stop=12, step=1) billing_year = random.randrange(2016, stop=2020, step=1) detail = {"123": 123} class BillingOrder(factory.DjangoModelFactory): billing_id = BillingFactory().id order_id = OrderFactory() ## Instruction: Remove a BillingOrder factory class that wasn't use There was a problem with this class... but since I couldn't find code using it, I simply deleted it. ## Code After: import factory import random from billing.models import Billing, OrderBilling from member.factories import ClientFactory from order.factories import OrderFactory class BillingFactory(factory.DjangoModelFactory): class Meta: model = Billing client = factory.SubFactory(ClientFactory) total_amount = random.randrange(1, stop=75, step=1) billing_month = random.randrange(1, stop=12, step=1) billing_year = random.randrange(2016, stop=2020, step=1) detail = {"123": 123}
... billing_year = random.randrange(2016, stop=2020, step=1) detail = {"123": 123} ...
0281aaa0868d0bfa6ecb7368cff89b4af6b57129
tests/functions_tests/test_dropout.py
tests/functions_tests/test_dropout.py
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) try: functions.dropout(x) except Exception: self.fail() def test_type_forward_cpu(self): self.check_type_forward(self.x) def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__)
import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) try: functions.dropout(x) except Exception: self.fail() def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__)
Add attr.gpu decorator to gpu test of dropout
Add attr.gpu decorator to gpu test of dropout
Python
mit
yanweifu/chainer,hvy/chainer,cupy/cupy,ysekky/chainer,woodshop/complex-chainer,niboshi/chainer,tkerola/chainer,kashif/chainer,kikusu/chainer,jnishi/chainer,okuta/chainer,niboshi/chainer,benob/chainer,chainer/chainer,AlpacaDB/chainer,sou81821/chainer,umitanuki/chainer,tscohen/chainer,cupy/cupy,laysakura/chainer,masia02/chainer,jfsantos/chainer,anaruse/chainer,keisuke-umezawa/chainer,truongdq/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,aonotas/chainer,ikasumi/chainer,kikusu/chainer,AlpacaDB/chainer,sinhrks/chainer,sinhrks/chainer,rezoo/chainer,okuta/chainer,jnishi/chainer,1986ks/chainer,muupan/chainer,ytoyama/yans_chainer_hackathon,minhpqn/chainer,wavelets/chainer,muupan/chainer,ktnyt/chainer,cemoody/chainer,ktnyt/chainer,cupy/cupy,kuwa32/chainer,ktnyt/chainer,hvy/chainer,jnishi/chainer,niboshi/chainer,Kaisuke5/chainer,tigerneil/chainer,ronekko/chainer,cupy/cupy,truongdq/chainer,pfnet/chainer,t-abe/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,t-abe/chainer,keisuke-umezawa/chainer,woodshop/chainer,jnishi/chainer,wkentaro/chainer,hidenori-t/chainer,elviswf/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,benob/chainer,delta2323/chainer,kiyukuta/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer
python
## Code Before: import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) try: functions.dropout(x) except Exception: self.fail() def test_type_forward_cpu(self): self.check_type_forward(self.x) def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__) ## Instruction: Add attr.gpu decorator to gpu test of dropout ## Code After: import unittest import numpy import chainer from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: cuda.init() class TestDropout(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32) def check_type_forward(self, x_data): x = chainer.Variable(x_data) try: functions.dropout(x) except Exception: self.fail() def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) testing.run_module(__name__, __file__)
// ... existing code ... from chainer import cuda from chainer import functions from chainer import testing from chainer.testing import attr if cuda.available: // ... modified code ... def test_type_forward_cpu(self): self.check_type_forward(self.x) @attr.gpu def test_type_forward_gpu(self): self.check_type_forward(cuda.to_gpu(self.x)) // ... rest of the code ...
d77f4a5c0ccca01d0d624d27aaa7b9e09bda3e16
src/nl/rubensten/texifyidea/structure/LatexOtherCommandPresentation.java
src/nl/rubensten/texifyidea/structure/LatexOtherCommandPresentation.java
package nl.rubensten.texifyidea.structure; import nl.rubensten.texifyidea.psi.LatexCommands; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author Ruben Schellekens */ public class LatexOtherCommandPresentation implements EditableHintPresentation { private final String commandName; private final Icon icon; private String hint = ""; public LatexOtherCommandPresentation(LatexCommands command, Icon icon) { this.commandName = command.getName(); this.icon = icon; } @Nullable @Override public String getPresentableText() { return commandName; } @Nullable @Override public String getLocationString() { return hint; } @Nullable @Override public Icon getIcon(boolean b) { return icon; } @Override public void setHint(@NotNull String hint) { this.hint = hint; } }
package nl.rubensten.texifyidea.structure; import com.intellij.navigation.ItemPresentation; import nl.rubensten.texifyidea.psi.LatexCommands; import nl.rubensten.texifyidea.util.TexifyUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author Ruben Schellekens */ public class LatexOtherCommandPresentation implements ItemPresentation { private final String commandName; private final Icon icon; private final String locationString; public LatexOtherCommandPresentation(LatexCommands command, Icon icon) { this.commandName = command.getName(); this.icon = icon; LatexCommands firstNext = TexifyUtil.getNextCommand(command); if (firstNext == null) { locationString = ""; return; } String lookup = firstNext.getCommandToken().getText(); this.locationString = lookup == null ? "" : lookup; } @Nullable @Override public String getPresentableText() { return commandName; } @Nullable @Override public String getLocationString() { return locationString; } @Nullable @Override public Icon getIcon(boolean b) { return icon; } }
Structure view contains value of \def or \let definition.
Structure view contains value of \def or \let definition.
Java
mit
Ruben-Sten/TeXiFy-IDEA,Ruben-Sten/TeXiFy-IDEA,Ruben-Sten/TeXiFy-IDEA,Ruben-Sten/TeXiFy-IDEA
java
## Code Before: package nl.rubensten.texifyidea.structure; import nl.rubensten.texifyidea.psi.LatexCommands; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author Ruben Schellekens */ public class LatexOtherCommandPresentation implements EditableHintPresentation { private final String commandName; private final Icon icon; private String hint = ""; public LatexOtherCommandPresentation(LatexCommands command, Icon icon) { this.commandName = command.getName(); this.icon = icon; } @Nullable @Override public String getPresentableText() { return commandName; } @Nullable @Override public String getLocationString() { return hint; } @Nullable @Override public Icon getIcon(boolean b) { return icon; } @Override public void setHint(@NotNull String hint) { this.hint = hint; } } ## Instruction: Structure view contains value of \def or \let definition. ## Code After: package nl.rubensten.texifyidea.structure; import com.intellij.navigation.ItemPresentation; import nl.rubensten.texifyidea.psi.LatexCommands; import nl.rubensten.texifyidea.util.TexifyUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author Ruben Schellekens */ public class LatexOtherCommandPresentation implements ItemPresentation { private final String commandName; private final Icon icon; private final String locationString; public LatexOtherCommandPresentation(LatexCommands command, Icon icon) { this.commandName = command.getName(); this.icon = icon; LatexCommands firstNext = TexifyUtil.getNextCommand(command); if (firstNext == null) { locationString = ""; return; } String lookup = firstNext.getCommandToken().getText(); this.locationString = lookup == null ? "" : lookup; } @Nullable @Override public String getPresentableText() { return commandName; } @Nullable @Override public String getLocationString() { return locationString; } @Nullable @Override public Icon getIcon(boolean b) { return icon; } }
# ... existing code ... package nl.rubensten.texifyidea.structure; import com.intellij.navigation.ItemPresentation; import nl.rubensten.texifyidea.psi.LatexCommands; import nl.rubensten.texifyidea.util.TexifyUtil; import org.jetbrains.annotations.Nullable; import javax.swing.*; # ... modified code ... /** * @author Ruben Schellekens */ public class LatexOtherCommandPresentation implements ItemPresentation { private final String commandName; private final Icon icon; private final String locationString; public LatexOtherCommandPresentation(LatexCommands command, Icon icon) { this.commandName = command.getName(); this.icon = icon; LatexCommands firstNext = TexifyUtil.getNextCommand(command); if (firstNext == null) { locationString = ""; return; } String lookup = firstNext.getCommandToken().getText(); this.locationString = lookup == null ? "" : lookup; } @Nullable ... @Nullable @Override public String getLocationString() { return locationString; } @Nullable ... public Icon getIcon(boolean b) { return icon; } } # ... rest of the code ...
68d7b3995c49abd8f7096f9498bdbddf6b696d81
back_office/models.py
back_office/models.py
from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ halaqat teachers informations """ GENDET_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) name = models.CharField(max_length=100, verbose_name=_('Name')) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDET_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) user = models.OneToOneField(to=User, related_name='teachers')
from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ halaqat teachers informations """ GENDET_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) name = models.CharField(max_length=100, verbose_name=_('Name')) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDET_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) user = models.OneToOneField(to=User, related_name='teachers') def enable(self): """ Enable teacher profile :return: """ self.enabled = True self.save() def disable(self): """ Disable teacher profile :return: """ self.enabled = False self.save()
Add enabled field to teacher model
Add enabled field to teacher model
Python
mit
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
python
## Code Before: from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ halaqat teachers informations """ GENDET_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) name = models.CharField(max_length=100, verbose_name=_('Name')) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDET_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) user = models.OneToOneField(to=User, related_name='teachers') ## Instruction: Add enabled field to teacher model ## Code After: from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ halaqat teachers informations """ GENDET_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) name = models.CharField(max_length=100, verbose_name=_('Name')) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDET_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) user = models.OneToOneField(to=User, related_name='teachers') def enable(self): """ Enable teacher profile :return: """ self.enabled = True self.save() def disable(self): """ Disable teacher profile :return: """ self.enabled = False self.save()
// ... existing code ... phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) user = models.OneToOneField(to=User, related_name='teachers') def enable(self): """ Enable teacher profile :return: """ self.enabled = True self.save() def disable(self): """ Disable teacher profile :return: """ self.enabled = False self.save() // ... rest of the code ...
d5de8224a0d67b74444a0ad7c755e3c7bc1c39a5
features.py
features.py
from PIL import Image from PIL.ImageStat import Stat from skimage.feature import local_binary_pattern class BaseFeatureExtractor(object): """ Basis for all feature extractors """ def extract(self, data): """ Return list of feature values """ raise NotImplementedError('No way of extracting features specified') class BasicImageStats(BaseFeatureExtractor): """ Compute some basic pixel-based image statistics """ def extract(self, img_path): stats = Stat(Image.open(img_path)) return stats.count \ + stats.sum \ + stats.sum2 \ + stats.mean \ + stats.median \ + stats.rms \ + stats.var \ + stats.stddev class LocalBinaryPatterns(BaseFeatureExtractor): """ Extract some LBPs """ def extract(self, img_path): image = Image.open(img_path) assert image.size > (500, 500), 'Image must have a size of at least 500x500' box = (100, 100, 500, 500) sub_img = image.crop(box) lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform') return lbp.flat
import numpy as np from PIL import Image from PIL.ImageStat import Stat from skimage.feature import local_binary_pattern class BaseFeatureExtractor(object): """ Basis for all feature extractors """ def extract(self, data): """ Return list of feature values """ raise NotImplementedError('No way of extracting features specified') class BasicImageStats(BaseFeatureExtractor): """ Compute some basic pixel-based image statistics """ def extract(self, img_path): stats = Stat(Image.open(img_path)) return stats.count \ + stats.sum \ + stats.sum2 \ + stats.mean \ + stats.median \ + stats.rms \ + stats.var \ + stats.stddev class LocalBinaryPatterns(BaseFeatureExtractor): """ Extract some LBPs """ def extract(self, img_path): image = Image.open(img_path) # assemble region of interest fac = 30/80 w, h = image.size box = ( int(fac * w), int(fac * h), int((1-fac) * w), int((1-fac) * h) ) sub_img = image.crop(box) # analyze local binary patterns lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform') hist = np.bincount( np.array(lbp.flat).astype(np.int64), minlength=50 ) return hist
Use histogram of local binary patterns
Use histogram of local binary patterns
Python
mit
kpj/PyClass
python
## Code Before: from PIL import Image from PIL.ImageStat import Stat from skimage.feature import local_binary_pattern class BaseFeatureExtractor(object): """ Basis for all feature extractors """ def extract(self, data): """ Return list of feature values """ raise NotImplementedError('No way of extracting features specified') class BasicImageStats(BaseFeatureExtractor): """ Compute some basic pixel-based image statistics """ def extract(self, img_path): stats = Stat(Image.open(img_path)) return stats.count \ + stats.sum \ + stats.sum2 \ + stats.mean \ + stats.median \ + stats.rms \ + stats.var \ + stats.stddev class LocalBinaryPatterns(BaseFeatureExtractor): """ Extract some LBPs """ def extract(self, img_path): image = Image.open(img_path) assert image.size > (500, 500), 'Image must have a size of at least 500x500' box = (100, 100, 500, 500) sub_img = image.crop(box) lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform') return lbp.flat ## Instruction: Use histogram of local binary patterns ## Code After: import numpy as np from PIL import Image from PIL.ImageStat import Stat from skimage.feature import local_binary_pattern class BaseFeatureExtractor(object): """ Basis for all feature extractors """ def extract(self, data): """ Return list of feature values """ raise NotImplementedError('No way of extracting features specified') class BasicImageStats(BaseFeatureExtractor): """ Compute some basic pixel-based image statistics """ def extract(self, img_path): stats = Stat(Image.open(img_path)) return stats.count \ + stats.sum \ + stats.sum2 \ + stats.mean \ + stats.median \ + stats.rms \ + stats.var \ + stats.stddev class LocalBinaryPatterns(BaseFeatureExtractor): """ Extract some LBPs """ def extract(self, img_path): image = Image.open(img_path) # assemble region of interest fac = 30/80 w, h = image.size box = ( int(fac * w), int(fac * h), int((1-fac) * w), int((1-fac) * h) ) sub_img = image.crop(box) # analyze local binary patterns lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform') hist = np.bincount( np.array(lbp.flat).astype(np.int64), minlength=50 ) return hist
# ... existing code ... import numpy as np from PIL import Image from PIL.ImageStat import Stat # ... modified code ... """ def extract(self, img_path): image = Image.open(img_path) # assemble region of interest fac = 30/80 w, h = image.size box = ( int(fac * w), int(fac * h), int((1-fac) * w), int((1-fac) * h) ) sub_img = image.crop(box) # analyze local binary patterns lbp = local_binary_pattern(sub_img.getdata(), 8 * 3, 3, 'uniform') hist = np.bincount( np.array(lbp.flat).astype(np.int64), minlength=50 ) return hist # ... rest of the code ...
aa008a13d9d10107f440dca71085f21d9622cd95
src/pybel/parser/baseparser.py
src/pybel/parser/baseparser.py
import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
Add line state to base parser
Add line state to base parser References #155
Python
mit
pybel/pybel,pybel/pybel,pybel/pybel
python
## Code Before: import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line): """Parses a string with the language represented by this parser :param line: A string representing an instance of this parser's language :type line: str """ return self.language.parseString(line) def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t) ## Instruction: Add line state to base parser References #155 ## Code After: import logging import time log = logging.getLogger(__name__) __all__ = ['BaseParser'] class BaseParser: """This abstract class represents a language backed by a PyParsing statement Multiple parsers can be easily chained together when they are all inheriting from this base class """ def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() def parse_lines(self, lines): """Parses multiple lines in succession :return: An list of the resulting parsed lines' tokens :rtype: list """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" t = time.time() self.language.streamline() log.info('Finished streamlining %s in %.02fs', self.__class__.__name__, time.time() - t)
// ... existing code ... def __init__(self, language, streamline=False): self.language = language #: The parser can hold an internal state of the current line self.line_number = None if streamline: self.streamline() // ... modified code ... """ return [self.parseString(line) for line in lines] def parseString(self, line, line_number=None): """Parses a string with the language represented by this parser :param str line: A string representing an instance of this parser's language :param int line_number: The current line number of the parser """ if line_number is None: return self.language.parseString(line) self.line_number = line_number result = self.language.parseString(line) self.line_number = None return result def streamline(self): """Streamlines the language represented by this parser to make queries run faster""" // ... rest of the code ...
43d4b6a3ccf49b3a0307da98344b0fe8f61acaf1
brew/rest.py
brew/rest.py
import json import time import jsonschema from pkg_resources import resource_string from flask import request, jsonify from brew import app, controller, machine, mongo @app.route('/api/recipe', methods=['POST']) def create_recipe(): schema = resource_string(__name__, 'data/recipe.schema.json').decode('utf-8') recipe_json = request.get_json() schema_dict = json.loads(schema) jsonschema.validate(schema_dict, recipe_json) mongo.db.recipes.insert(recipe_json) return jsonify(success=True) @app.route('/api/status', methods=['GET']) def status(): return jsonify(timestamp=int(time.time() * 1000), step=machine.current_step, temperature=controller.get_temperature())
import json import time import jsonschema from pkg_resources import resource_string from flask import request, jsonify from brew import app, controller, machine, mongo @app.route('/api/recipe', methods=['POST']) def create_recipe(): schema = resource_string(__name__, 'data/recipe.schema.json').decode('utf-8') recipe_json = request.get_json() schema_dict = json.loads(schema) jsonschema.validate(schema_dict, recipe_json) mongo.db.recipes.insert(recipe_json) for malt in recipe_json['malts']: d = {'name': malt['name']} mongo.db.malts.update(d, d, True) return jsonify(success=True) @app.route('/api/status', methods=['GET']) def status(): return jsonify(timestamp=int(time.time() * 1000), step=machine.current_step, temperature=controller.get_temperature())
Save malts for future reference
Save malts for future reference
Python
mit
brewpeople/brewmeister,brewpeople/brewmeister,brewpeople/brewmeister
python
## Code Before: import json import time import jsonschema from pkg_resources import resource_string from flask import request, jsonify from brew import app, controller, machine, mongo @app.route('/api/recipe', methods=['POST']) def create_recipe(): schema = resource_string(__name__, 'data/recipe.schema.json').decode('utf-8') recipe_json = request.get_json() schema_dict = json.loads(schema) jsonschema.validate(schema_dict, recipe_json) mongo.db.recipes.insert(recipe_json) return jsonify(success=True) @app.route('/api/status', methods=['GET']) def status(): return jsonify(timestamp=int(time.time() * 1000), step=machine.current_step, temperature=controller.get_temperature()) ## Instruction: Save malts for future reference ## Code After: import json import time import jsonschema from pkg_resources import resource_string from flask import request, jsonify from brew import app, controller, machine, mongo @app.route('/api/recipe', methods=['POST']) def create_recipe(): schema = resource_string(__name__, 'data/recipe.schema.json').decode('utf-8') recipe_json = request.get_json() schema_dict = json.loads(schema) jsonschema.validate(schema_dict, recipe_json) mongo.db.recipes.insert(recipe_json) for malt in recipe_json['malts']: d = {'name': malt['name']} mongo.db.malts.update(d, d, True) return jsonify(success=True) @app.route('/api/status', methods=['GET']) def status(): return jsonify(timestamp=int(time.time() * 1000), step=machine.current_step, temperature=controller.get_temperature())
... schema_dict = json.loads(schema) jsonschema.validate(schema_dict, recipe_json) mongo.db.recipes.insert(recipe_json) for malt in recipe_json['malts']: d = {'name': malt['name']} mongo.db.malts.update(d, d, True) return jsonify(success=True) ...
1bf6211f2fd5aef99e529fdc0e714b1a36ace346
gallery/util.py
gallery/util.py
import os from addict import Dict from gallery.models import File def get_dir_file_contents(dir_id): print(File.query.filter(File.parent == dir_id).all()) return File.query.filter(File.parent == dir_id).all() def get_dir_tree_dict(): path = os.path.normpath("/gallery-data/root") file_tree = Dict() for root, _, files in os.walk(path, topdown=True): path = root.split('/') path.pop(0) file_tree_fd = file_tree for part in path: file_tree_fd = file_tree_fd[part] file_tree_fd['.'] = files return file_tree def convert_bytes_to_utf8(dic): for key in dic: if isinstance(key, bytes): k = key.decode('utf-8') v = dic[key] del dic[key] dic[k] = v if isinstance(dic[key], bytes): v = dic[key].decode('utf-8') dic[key] = v return dic def allowed_file(filename): return '.' in filename and filename.lower().rsplit('.', 1)[1] in \ [ 'txt', 'png', 'jpg', 'jpeg', 'mpg', 'mp4', 'avi' ]
import os from addict import Dict from gallery.models import File def get_dir_file_contents(dir_id): print(File.query.filter(File.parent == dir_id).all()) return File.query.filter(File.parent == dir_id).all() def get_dir_tree_dict(): path = os.path.normpath("/gallery-data/root") file_tree = Dict() for root, _, files in os.walk(path, topdown=True): path = root.split('/') path.pop(0) file_tree_fd = file_tree for part in path: file_tree_fd = file_tree_fd[part] file_tree_fd['.'] = files return file_tree def convert_bytes_to_utf8(dic): for key in dic: if isinstance(key, bytes): k = key.decode('utf-8') v = dic[key] del dic[key] dic[k] = v if isinstance(dic[key], bytes): v = dic[key].decode('utf-8') dic[key] = v return dic def allowed_file(filename): return '.' in filename and filename.lower().rsplit('.', 1)[1] in \ [ 'txt', 'png', 'jpg', 'jpeg', 'mpg', 'mp4', 'avi', 'cr2' ]
Add CR2 to allowed files
Add CR2 to allowed files
Python
mit
liam-middlebrook/gallery,liam-middlebrook/gallery,liam-middlebrook/gallery,liam-middlebrook/gallery
python
## Code Before: import os from addict import Dict from gallery.models import File def get_dir_file_contents(dir_id): print(File.query.filter(File.parent == dir_id).all()) return File.query.filter(File.parent == dir_id).all() def get_dir_tree_dict(): path = os.path.normpath("/gallery-data/root") file_tree = Dict() for root, _, files in os.walk(path, topdown=True): path = root.split('/') path.pop(0) file_tree_fd = file_tree for part in path: file_tree_fd = file_tree_fd[part] file_tree_fd['.'] = files return file_tree def convert_bytes_to_utf8(dic): for key in dic: if isinstance(key, bytes): k = key.decode('utf-8') v = dic[key] del dic[key] dic[k] = v if isinstance(dic[key], bytes): v = dic[key].decode('utf-8') dic[key] = v return dic def allowed_file(filename): return '.' in filename and filename.lower().rsplit('.', 1)[1] in \ [ 'txt', 'png', 'jpg', 'jpeg', 'mpg', 'mp4', 'avi' ] ## Instruction: Add CR2 to allowed files ## Code After: import os from addict import Dict from gallery.models import File def get_dir_file_contents(dir_id): print(File.query.filter(File.parent == dir_id).all()) return File.query.filter(File.parent == dir_id).all() def get_dir_tree_dict(): path = os.path.normpath("/gallery-data/root") file_tree = Dict() for root, _, files in os.walk(path, topdown=True): path = root.split('/') path.pop(0) file_tree_fd = file_tree for part in path: file_tree_fd = file_tree_fd[part] file_tree_fd['.'] = files return file_tree def convert_bytes_to_utf8(dic): for key in dic: if isinstance(key, bytes): k = key.decode('utf-8') v = dic[key] del dic[key] dic[k] = v if isinstance(dic[key], bytes): v = dic[key].decode('utf-8') dic[key] = v return dic def allowed_file(filename): return '.' in filename and filename.lower().rsplit('.', 1)[1] in \ [ 'txt', 'png', 'jpg', 'jpeg', 'mpg', 'mp4', 'avi', 'cr2' ]
# ... existing code ... 'jpeg', 'mpg', 'mp4', 'avi', 'cr2' ] # ... rest of the code ...
aae994402b1b16a2bca4a486dad4bb452770eb26
tests/pipeline/test_provider_healthcheck.py
tests/pipeline/test_provider_healthcheck.py
"""Test Provider Health Check setting.""" from foremast.pipeline.construct_pipeline_block import check_provider_healthcheck TEST_SETTINGS = {'app': {'eureka_enabled': False}, 'asg': {'provider_healthcheck': {}}} def test_provider_healthcheck(): """Make sure default Provider Health Check works.""" provider_healthcheck, has_provider_healthcheck = check_provider_healthcheck(settings=TEST_SETTINGS) assert provider_healthcheck == [] assert has_provider_healthcheck == False
"""Test Provider Health Check setting.""" from foremast.pipeline.construct_pipeline_block import check_provider_healthcheck TEST_SETTINGS = {'app': {'eureka_enabled': False}, 'asg': {'provider_healthcheck': {}}} def test_provider_healthcheck(): """Make sure default Provider Health Check works.""" health_checks = check_provider_healthcheck(settings=TEST_SETTINGS) assert health_checks.providers == [] assert health_checks.has_healthcheck == False
Update Provider Health Check sanity
test: Update Provider Health Check sanity

See also: PSOBAT-2465
Python
apache-2.0
gogoair/foremast,gogoair/foremast
python
## Code Before: """Test Provider Health Check setting.""" from foremast.pipeline.construct_pipeline_block import check_provider_healthcheck TEST_SETTINGS = {'app': {'eureka_enabled': False}, 'asg': {'provider_healthcheck': {}}} def test_provider_healthcheck(): """Make sure default Provider Health Check works.""" provider_healthcheck, has_provider_healthcheck = check_provider_healthcheck(settings=TEST_SETTINGS) assert provider_healthcheck == [] assert has_provider_healthcheck == False ## Instruction: test: Update Provider Health Check sanity See also: PSOBAT-2465 ## Code After: """Test Provider Health Check setting.""" from foremast.pipeline.construct_pipeline_block import check_provider_healthcheck TEST_SETTINGS = {'app': {'eureka_enabled': False}, 'asg': {'provider_healthcheck': {}}} def test_provider_healthcheck(): """Make sure default Provider Health Check works.""" health_checks = check_provider_healthcheck(settings=TEST_SETTINGS) assert health_checks.providers == [] assert health_checks.has_healthcheck == False
... def test_provider_healthcheck(): """Make sure default Provider Health Check works.""" health_checks = check_provider_healthcheck(settings=TEST_SETTINGS) assert health_checks.providers == [] assert health_checks.has_healthcheck == False ...
516ae314353459ec65f498a64eb5254dcc0fe24a
subprojects/data-structures/build.gradle.kts
subprojects/data-structures/build.gradle.kts
plugins {
    id("gradlebuild.distribution.implementation-java")
}

dependencies {
    implementation(project(":base-services"))
}
plugins {
    id("gradlebuild.distribution.implementation-java")
}

description = "A set of generic data structures."

dependencies {
    implementation(project(":base-services"))
}
Add description to `:data-structures` project
Add description to `:data-structures` project
Kotlin
apache-2.0
blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle
kotlin
## Code Before: plugins { id("gradlebuild.distribution.implementation-java") } dependencies { implementation(project(":base-services")) } ## Instruction: Add description to `:data-structures` project ## Code After: plugins { id("gradlebuild.distribution.implementation-java") } description = "A set of generic data structures." dependencies { implementation(project(":base-services")) }
# ... existing code ... id("gradlebuild.distribution.implementation-java") } description = "A set of generic data structures." dependencies { implementation(project(":base-services")) } # ... rest of the code ...
3045f6ffbd8433d60178fee59550d30064015b46
tm/tm.py
tm/tm.py
import sys
import subprocess
import argparse

__version__ = 1.0
__description__ = "A tmux wrapper featuring shortcuts and session presets."


def main(argv):
    parser = argparse.ArgumentParser(description=__description__)
    parser.add_argument("session", metavar="session", type=str, nargs="?",
                        help="the name of the tmux session to start or attach")
    parser.add_argument("-l", "--list", action="store_true",
                        help="list all open sessions and session presets")
    parser.add_argument("-k", "--kill", metavar="session", action="store",
                        help="kill a session")
    args = parser.parse_args()

    err = ""
    if args.kill:
        pass
    elif args.list:
        pass
    elif args.session:
        pass


if __name__ == "__main__":
    main(sys.argv[1:])
import sys
import subprocess
import argparse

__version__ = 1.0
__description__ = "A tmux wrapper featuring shortcuts and session presets."


def main(argv):
    parser = argparse.ArgumentParser(description=__description__)
    parser.add_argument("session", metavar="session", type=str, nargs="?",
                        help="the name of the tmux session to start or attach")
    parser.add_argument("-l", "--list", action="store_true",
                        help="list all open sessions and session presets")
    parser.add_argument("-k", "--kill", metavar="session", action="store",
                        help="kill a session")
    args = parser.parse_args()

    if len(argv) == 0:
        parser.print_help()

    if args.kill:
        p = subprocess.Popen("tmux kill-session -t {}".format(args.kill),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True)
        out, err = p.communicate()
    elif args.list:
        p = subprocess.Popen("tmux ls",
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True)
        out, err = p.communicate()
    elif args.session:
        p = subprocess.Popen("tmux new -s {}".format(args.session),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True)
        out, err = p.communicate()


if __name__ == "__main__":
    main(sys.argv[1:])
Add kill, list, and create commands
Add kill, list, and create commands
Python
mit
ethanal/tm
python
## Code Before: import sys import subprocess import argparse __version__ = 1.0 __description__ = "A tmux wrapper featuring shortcuts and session presets." def main(argv): parser = argparse.ArgumentParser(description=__description__) parser.add_argument("session", metavar="session", type=str, nargs="?", help="the name of the tmux session to start or attach") parser.add_argument("-l", "--list", action="store_true", help="list all open sessions and session presets") parser.add_argument("-k", "--kill", metavar="session", action="store", help="kill a session") args = parser.parse_args() err = "" if args.kill: pass elif args.list: pass elif args.session: pass if __name__ == "__main__": main(sys.argv[1:]) ## Instruction: Add kill, list, and create commands ## Code After: import sys import subprocess import argparse __version__ = 1.0 __description__ = "A tmux wrapper featuring shortcuts and session presets." def main(argv): parser = argparse.ArgumentParser(description=__description__) parser.add_argument("session", metavar="session", type=str, nargs="?", help="the name of the tmux session to start or attach") parser.add_argument("-l", "--list", action="store_true", help="list all open sessions and session presets") parser.add_argument("-k", "--kill", metavar="session", action="store", help="kill a session") args = parser.parse_args() if len(argv) == 0: parser.print_help() if args.kill: p = subprocess.Popen("tmux kill-session -t {}".format(args.kill), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() elif args.list: p = subprocess.Popen("tmux ls", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() elif args.session: p = subprocess.Popen("tmux new -s {}".format(args.session), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() if __name__ == "__main__": main(sys.argv[1:])
# ... existing code ... args = parser.parse_args() if len(argv) == 0: parser.print_help() if args.kill: p = subprocess.Popen("tmux kill-session -t {}".format(args.kill), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() elif args.list: p = subprocess.Popen("tmux ls", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() elif args.session: p = subprocess.Popen("tmux new -s {}".format(args.session), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) out, err = p.communicate() if __name__ == "__main__": main(sys.argv[1:]) # ... rest of the code ...
d080871e248cdf457fd1ff0023b5a651456d5b0b
openfisca_senegal/survey_scenarios.py
openfisca_senegal/survey_scenarios.py
from openfisca_senegal import CountryTaxBenefitSystem as SenegalTaxBenefitSystem
from openfisca_survey_manager.scenarios import AbstractSurveyScenario


class SenegalSurveyScenario(AbstractSurveyScenario):
    id_variable_by_entity_key = dict(
        famille = 'id_famille',
        )
    role_variable_by_entity_key = dict(
        famille = 'role_famille',
        )

    def __init__(self, input_data_frame = None, tax_benefit_system = None,
                 reference_tax_benefit_system = None, year = None):
        super(SenegalSurveyScenario, self).__init__()
        assert input_data_frame is not None
        assert year is not None
        self.year = year
        if tax_benefit_system is None:
            tax_benefit_system = SenegalTaxBenefitSystem()
        self.set_tax_benefit_systems(
            tax_benefit_system = tax_benefit_system,
            reference_tax_benefit_system = reference_tax_benefit_system
            )
        self.used_as_input_variables = list(
            set(tax_benefit_system.column_by_name.keys()).intersection(
                set(input_data_frame.columns)
                ))
        self.init_from_data_frame(input_data_frame = input_data_frame)
        self.new_simulation()
        if reference_tax_benefit_system is not None:
            self.new_simulation(reference = True)
from openfisca_senegal import CountryTaxBenefitSystem as SenegalTaxBenefitSystem
from openfisca_survey_manager.scenarios import AbstractSurveyScenario


class SenegalSurveyScenario(AbstractSurveyScenario):
    id_variable_by_entity_key = dict(
        famille = 'id_famille',
        )
    role_variable_by_entity_key = dict(
        famille = 'role_famille',
        )

    def __init__(self, input_data_frame = None, tax_benefit_system = None,
                 baseline_tax_benefit_system = None, year = None):
        super(SenegalSurveyScenario, self).__init__()
        assert input_data_frame is not None
        assert year is not None
        self.year = year
        if tax_benefit_system is None:
            tax_benefit_system = SenegalTaxBenefitSystem()
        self.set_tax_benefit_systems(
            tax_benefit_system = tax_benefit_system,
            baseline_tax_benefit_system = baseline_tax_benefit_system
            )
        self.used_as_input_variables = list(
            set(tax_benefit_system.variables.keys()).intersection(
                set(input_data_frame.columns)
                ))
        self.init_from_data_frame(input_data_frame = input_data_frame)
        self.new_simulation()
        if baseline_tax_benefit_system is not None:
            self.new_simulation(use_baseline = True)
Fix survey_scenario (use core v20 syntax)
Fix survey_scenario (use core v20 syntax)
Python
agpl-3.0
openfisca/senegal
python
## Code Before: from openfisca_senegal import CountryTaxBenefitSystem as SenegalTaxBenefitSystem from openfisca_survey_manager.scenarios import AbstractSurveyScenario class SenegalSurveyScenario(AbstractSurveyScenario): id_variable_by_entity_key = dict( famille = 'id_famille', ) role_variable_by_entity_key = dict( famille = 'role_famille', ) def __init__(self, input_data_frame = None, tax_benefit_system = None, reference_tax_benefit_system = None, year = None): super(SenegalSurveyScenario, self).__init__() assert input_data_frame is not None assert year is not None self.year = year if tax_benefit_system is None: tax_benefit_system = SenegalTaxBenefitSystem() self.set_tax_benefit_systems( tax_benefit_system = tax_benefit_system, reference_tax_benefit_system = reference_tax_benefit_system ) self.used_as_input_variables = list( set(tax_benefit_system.column_by_name.keys()).intersection( set(input_data_frame.columns) )) self.init_from_data_frame(input_data_frame = input_data_frame) self.new_simulation() if reference_tax_benefit_system is not None: self.new_simulation(reference = True) ## Instruction: Fix survey_scenario (use core v20 syntax) ## Code After: from openfisca_senegal import CountryTaxBenefitSystem as SenegalTaxBenefitSystem from openfisca_survey_manager.scenarios import AbstractSurveyScenario class SenegalSurveyScenario(AbstractSurveyScenario): id_variable_by_entity_key = dict( famille = 'id_famille', ) role_variable_by_entity_key = dict( famille = 'role_famille', ) def __init__(self, input_data_frame = None, tax_benefit_system = None, baseline_tax_benefit_system = None, year = None): super(SenegalSurveyScenario, self).__init__() assert input_data_frame is not None assert year is not None self.year = year if tax_benefit_system is None: tax_benefit_system = SenegalTaxBenefitSystem() self.set_tax_benefit_systems( tax_benefit_system = tax_benefit_system, baseline_tax_benefit_system = baseline_tax_benefit_system ) self.used_as_input_variables = list( set(tax_benefit_system.variables.keys()).intersection( set(input_data_frame.columns) )) self.init_from_data_frame(input_data_frame = input_data_frame) self.new_simulation() if baseline_tax_benefit_system is not None: self.new_simulation(use_baseline = True)
// ... existing code ... ) def __init__(self, input_data_frame = None, tax_benefit_system = None, baseline_tax_benefit_system = None, year = None): super(SenegalSurveyScenario, self).__init__() assert input_data_frame is not None assert year is not None // ... modified code ... tax_benefit_system = SenegalTaxBenefitSystem() self.set_tax_benefit_systems( tax_benefit_system = tax_benefit_system, baseline_tax_benefit_system = baseline_tax_benefit_system ) self.used_as_input_variables = list( set(tax_benefit_system.variables.keys()).intersection( set(input_data_frame.columns) )) self.init_from_data_frame(input_data_frame = input_data_frame) self.new_simulation() if baseline_tax_benefit_system is not None: self.new_simulation(use_baseline = True) // ... rest of the code ...
f4429e49c8b493fa285d169a41b82cb761716705
tests/explorers_tests/test_additive_ou.py
tests/explorers_tests/test_additive_ou.py
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest

import numpy as np

from chainerrl.explorers.additive_ou import AdditiveOU


class TestAdditiveOU(unittest.TestCase):

    def test(self):
        action_size = 3
        dt = 0.5
        sigma = 0.001
        theta = 0.3

        def greedy_action_func():
            return np.asarray([0] * action_size, dtype=np.float32)

        explorer = AdditiveOU(action_size, dt=dt, theta=theta, sigma=sigma)

        for t in range(10000):
            a = explorer.select_action(t, greedy_action_func)
            print(a)
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest

import numpy as np

from chainerrl.explorers.additive_ou import AdditiveOU


class TestAdditiveOU(unittest.TestCase):

    def test(self):
        action_size = 3

        def greedy_action_func():
            return np.asarray([0] * action_size, dtype=np.float32)

        explorer = AdditiveOU()

        for t in range(100):
            a = explorer.select_action(t, greedy_action_func)
            print(t, a)
Fix a test for AdditiveOU
Fix a test for AdditiveOU
Python
mit
toslunar/chainerrl,toslunar/chainerrl
python
## Code Before: from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 dt = 0.5 sigma = 0.001 theta = 0.3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU(action_size, dt=dt, theta=theta, sigma=sigma) for t in range(10000): a = explorer.select_action(t, greedy_action_func) print(a) ## Instruction: Fix a test for AdditiveOU ## Code After: from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() import unittest import numpy as np from chainerrl.explorers.additive_ou import AdditiveOU class TestAdditiveOU(unittest.TestCase): def test(self): action_size = 3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU() for t in range(100): a = explorer.select_action(t, greedy_action_func) print(t, a)
// ... existing code ... def test(self): action_size = 3 def greedy_action_func(): return np.asarray([0] * action_size, dtype=np.float32) explorer = AdditiveOU() for t in range(100): a = explorer.select_action(t, greedy_action_func) print(t, a) // ... rest of the code ...
a6e868803e1336d83ee8863d15896880603fc777
tornwamp/customize.py
tornwamp/customize.py
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code

processors = {
    Code.HELLO: HelloProcessor,
    Code.GOODBYE: GoodbyeProcessor,
    Code.SUBSCRIBE: pubsub.SubscribeProcessor,
    Code.CALL: rpc.CallProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc
from tornwamp.messages import Code

processors = {
    Code.HELLO: HelloProcessor,
    Code.GOODBYE: GoodbyeProcessor,
    Code.SUBSCRIBE: pubsub.SubscribeProcessor,
    Code.CALL: rpc.CallProcessor,
    Code.PUBLISH: pubsub.PublishProcessor
}
# 2: 'welcome',
# 3: 'abort',
# 4: 'challenge',
# 5: 'authenticate',
# 7: 'heartbeat',
# 8: 'error',
# 16: 'publish',
# 17: 'published',
# 32: 'subscribe',
# 33: 'subscribed',
# 34: 'unsubscribe',
# 35: 'unsubscribed',
# 36: 'event',
# 49: 'cancel',
# 50: 'result',
# 64: 'register',
# 65: 'registered',
# 66: 'unregister',
# 67: 'unregistered',
# 68: 'invocation',
# 69: 'interrupt',
# 70: 'yield'
Add PublishProcessor to processors' list
Add PublishProcessor to processors' list
Python
apache-2.0
ef-ctx/tornwamp
python
## Code Before: from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc from tornwamp.messages import Code processors = { Code.HELLO: HelloProcessor, Code.GOODBYE: GoodbyeProcessor, Code.SUBSCRIBE: pubsub.SubscribeProcessor, Code.CALL: rpc.CallProcessor } # 2: 'welcome', # 3: 'abort', # 4: 'challenge', # 5: 'authenticate', # 7: 'heartbeat', # 8: 'error', # 16: 'publish', # 17: 'published', # 32: 'subscribe', # 33: 'subscribed', # 34: 'unsubscribe', # 35: 'unsubscribed', # 36: 'event', # 49: 'cancel', # 50: 'result', # 64: 'register', # 65: 'registered', # 66: 'unregister', # 67: 'unregistered', # 68: 'invocation', # 69: 'interrupt', # 70: 'yield' ## Instruction: Add PublishProcessor to processors' list ## Code After: from tornwamp.processors import GoodbyeProcessor, HelloProcessor, pubsub, rpc from tornwamp.messages import Code processors = { Code.HELLO: HelloProcessor, Code.GOODBYE: GoodbyeProcessor, Code.SUBSCRIBE: pubsub.SubscribeProcessor, Code.CALL: rpc.CallProcessor, Code.PUBLISH: pubsub.PublishProcessor } # 2: 'welcome', # 3: 'abort', # 4: 'challenge', # 5: 'authenticate', # 7: 'heartbeat', # 8: 'error', # 16: 'publish', # 17: 'published', # 32: 'subscribe', # 33: 'subscribed', # 34: 'unsubscribe', # 35: 'unsubscribed', # 36: 'event', # 49: 'cancel', # 50: 'result', # 64: 'register', # 65: 'registered', # 66: 'unregister', # 67: 'unregistered', # 68: 'invocation', # 69: 'interrupt', # 70: 'yield'
// ... existing code ... Code.HELLO: HelloProcessor, Code.GOODBYE: GoodbyeProcessor, Code.SUBSCRIBE: pubsub.SubscribeProcessor, Code.CALL: rpc.CallProcessor, Code.PUBLISH: pubsub.PublishProcessor } # 2: 'welcome', # 3: 'abort', // ... rest of the code ...
ce644b55ee72790d111435b81fb76a2ea15913c5
gabbi/fixture.py
gabbi/fixture.py
"""Manage fixtures for gabbi at the test file level.""" def start_fixture(fixture_class): """Create the fixture class and start it.""" fixture = fixture_class() fixture.start() def stop_fixture(fixture_class): """Create the fixture class and stop it.""" fixture = fixture_class() fixture.stop() class GabbiFixture(object): """A singleton of a fixture.""" _instance = None _started = False _halted = False def __new__(cls, *args, **kwargs): if not cls._instance: cls._instance = super(GabbiFixture, cls).__new__(cls) return cls._instance def start(self): if not self._started and not self._halted: self._started = True self.start_fixture() def start_fixture(self): pass def stop(self): if self._started: self.stop_fixture() self._started = False self._halted = True def stop_fixture(self): pass
"""Manage fixtures for gabbi at the test file level.""" def start_fixture(fixture_class): """Create the fixture class and start it.""" fixture = fixture_class() fixture.start() def stop_fixture(fixture_class): """Re-Create the fixture class and stop it.""" fixture = fixture_class() fixture.stop() class GabbiFixtureError(Exception): """Generic exception for GabbiFixture.""" pass class GabbiFixture(object): """A singleton of a fixture. Subclasses must implement start_fixture and stop_fixture, each of which contain the logic for stopping and starting whatever the fixture is. What a fixture is is left as an exercise for the implementor. A singleton is used so as to avoid in process duplication of the same fixture. For environments where concurrent testing will be used, the fixture should guard against collisions by uniquifying filenames, database names and other external resources. If calling code attempts to start an already started fixture, an Exception will be raised: GabbiFixtureError. """ _instance = None _started = False def __new__(cls, *args, **kwargs): """Create the new instance or return an existing one.""" if not cls._instance: cls._instance = super(GabbiFixture, cls).__new__(cls) return cls._instance def start(self): """Request that the fixture be started.""" if not self._started: self.start_fixture() self._started = True else: raise GabbiFixtureError('fixture %s already started' % self) def start_fixture(self): """Implement the actual workings of starting the fixture here.""" pass def stop(self): """Request that the fixture be stopped.""" if self._started: self.stop_fixture() self._started = False def stop_fixture(self): """Implement the actual workings of stopping the fixture here.""" pass
Add some docs to GabbiFixture
Add some docs to GabbiFixture
Python
apache-2.0
FND/gabbi,jasonamyers/gabbi,jasonamyers/gabbi,FND/gabbi
python
## Code Before: """Manage fixtures for gabbi at the test file level.""" def start_fixture(fixture_class): """Create the fixture class and start it.""" fixture = fixture_class() fixture.start() def stop_fixture(fixture_class): """Create the fixture class and stop it.""" fixture = fixture_class() fixture.stop() class GabbiFixture(object): """A singleton of a fixture.""" _instance = None _started = False _halted = False def __new__(cls, *args, **kwargs): if not cls._instance: cls._instance = super(GabbiFixture, cls).__new__(cls) return cls._instance def start(self): if not self._started and not self._halted: self._started = True self.start_fixture() def start_fixture(self): pass def stop(self): if self._started: self.stop_fixture() self._started = False self._halted = True def stop_fixture(self): pass ## Instruction: Add some docs to GabbiFixture ## Code After: """Manage fixtures for gabbi at the test file level.""" def start_fixture(fixture_class): """Create the fixture class and start it.""" fixture = fixture_class() fixture.start() def stop_fixture(fixture_class): """Re-Create the fixture class and stop it.""" fixture = fixture_class() fixture.stop() class GabbiFixtureError(Exception): """Generic exception for GabbiFixture.""" pass class GabbiFixture(object): """A singleton of a fixture. Subclasses must implement start_fixture and stop_fixture, each of which contain the logic for stopping and starting whatever the fixture is. What a fixture is is left as an exercise for the implementor. A singleton is used so as to avoid in process duplication of the same fixture. For environments where concurrent testing will be used, the fixture should guard against collisions by uniquifying filenames, database names and other external resources. If calling code attempts to start an already started fixture, an Exception will be raised: GabbiFixtureError. """ _instance = None _started = False def __new__(cls, *args, **kwargs): """Create the new instance or return an existing one.""" if not cls._instance: cls._instance = super(GabbiFixture, cls).__new__(cls) return cls._instance def start(self): """Request that the fixture be started.""" if not self._started: self.start_fixture() self._started = True else: raise GabbiFixtureError('fixture %s already started' % self) def start_fixture(self): """Implement the actual workings of starting the fixture here.""" pass def stop(self): """Request that the fixture be stopped.""" if self._started: self.stop_fixture() self._started = False def stop_fixture(self): """Implement the actual workings of stopping the fixture here.""" pass
# ... existing code ... def stop_fixture(fixture_class): """Re-Create the fixture class and stop it.""" fixture = fixture_class() fixture.stop() class GabbiFixtureError(Exception): """Generic exception for GabbiFixture.""" pass class GabbiFixture(object): """A singleton of a fixture. Subclasses must implement start_fixture and stop_fixture, each of which contain the logic for stopping and starting whatever the fixture is. What a fixture is is left as an exercise for the implementor. A singleton is used so as to avoid in process duplication of the same fixture. For environments where concurrent testing will be used, the fixture should guard against collisions by uniquifying filenames, database names and other external resources. If calling code attempts to start an already started fixture, an Exception will be raised: GabbiFixtureError. """ _instance = None _started = False def __new__(cls, *args, **kwargs): """Create the new instance or return an existing one.""" if not cls._instance: cls._instance = super(GabbiFixture, cls).__new__(cls) return cls._instance def start(self): """Request that the fixture be started.""" if not self._started: self.start_fixture() self._started = True else: raise GabbiFixtureError('fixture %s already started' % self) def start_fixture(self): """Implement the actual workings of starting the fixture here.""" pass def stop(self): """Request that the fixture be stopped.""" if self._started: self.stop_fixture() self._started = False def stop_fixture(self): """Implement the actual workings of stopping the fixture here.""" pass # ... rest of the code ...
c85fbf33d22a9775f9d22b863027eb50b41923c2
src/excel_sheet_column_title.py
src/excel_sheet_column_title.py
# @return a string
def convertToTitle(num):


if __name__ == '__main__':
    test = [1:'A', 2:'B', 3:'C', 26:'Z', 27:'AA', 28:'AB']
# @return a string
def convertToTitle(num):
    remainders = []
    quotient = num
    while quotient > 26:
        remainder = quotient%26 or 26
        quotient = (quotient-remainder)/26
        remainders.append(remainder)
    remainders.append(quotient)
    chars = []
    for i in reversed(remainders):
        chars.append(chr(i+ord('A')-1))
    return ''.join(chars)


if __name__ == '__main__':
    test = {1:'A', 2:'B', 3:'C', 26:'Z', 27:'AA', 28:'AB', 52:'AZ', 731:'ABC'}
    for k,v in test.iteritems():
        output = convertToTitle(k)
        if v != output:
            print 'Input:', k
            print 'Output:', output
            print 'Expected:', v
Add solution for excel sheet column title
Add solution for excel sheet column title
Python
mit
chancyWu/leetcode
python
## Code Before: # @return a string def convertToTitle(num): if __name__ == '__main__': test = [1:'A', 2:'B', 3:'C', 26:'Z', 27:'AA', 28:'AB'] ## Instruction: Add solution for excel sheet column title ## Code After: # @return a string def convertToTitle(num): remainders = [] quotient = num while quotient > 26: remainder = quotient%26 or 26 quotient = (quotient-remainder)/26 remainders.append(remainder) remainders.append(quotient) chars = [] for i in reversed(remainders): chars.append(chr(i+ord('A')-1)) return ''.join(chars) if __name__ == '__main__': test = {1:'A', 2:'B', 3:'C', 26:'Z', 27:'AA', 28:'AB', 52:'AZ', 731:'ABC'} for k,v in test.iteritems(): output = convertToTitle(k) if v != output: print 'Input:', k print 'Output:', output print 'Expected:', v
# ... existing code ... # @return a string def convertToTitle(num): remainders = [] quotient = num while quotient > 26: remainder = quotient%26 or 26 quotient = (quotient-remainder)/26 remainders.append(remainder) remainders.append(quotient) chars = [] for i in reversed(remainders): chars.append(chr(i+ord('A')-1)) return ''.join(chars) if __name__ == '__main__': test = {1:'A', 2:'B', 3:'C', 26:'Z', 27:'AA', 28:'AB', 52:'AZ', 731:'ABC'} for k,v in test.iteritems(): output = convertToTitle(k) if v != output: print 'Input:', k print 'Output:', output print 'Expected:', v # ... rest of the code ...
bb80ef40356be4384b0ddf0e4510865d4d33c654
appengine_config.py
appengine_config.py
import site
import os.path

# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
from google.appengine.ext import vendor

vendor.add('lib')
Use a newer method for specifying the vendored packages directory.
Use a newer method for specifying the vendored packages directory.
Python
mit
boulder-python/boulderpython.org,boulder-python/boulderpython.org,boulder-python/boulderpython.org,boulder-python/boulderpython.org
python
## Code Before: import site import os.path # add `lib` subdirectory as a site packages directory, so our `main` module can load # third-party libraries. site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib')) ## Instruction: Use a newer method for specifying the vendored packages directory. ## Code After: from google.appengine.ext import vendor vendor.add('lib')
# ... existing code ... from google.appengine.ext import vendor vendor.add('lib') # ... rest of the code ...
370809a6715675aee98b05bdd3c4d0c26a5d156a
meals/models.py
meals/models.py
from django.db import models


class Wbw_list(models.Model):
    list_id = models.IntegerField(unique=True)
    name = models.CharField(max_length=200, blank=True)

    def __str__(self):
        return self.name


class Participant(models.Model):
    wbw_list = models.ManyToManyField(Wbw_list, through='Participation')
    wbw_id = models.IntegerField(unique=True)


class Participation(models.Model):
    name = models.CharField(max_length=200)
    wbw_list = models.ForeignKey(Wbw_list)
    participant = models.ForeignKey(Participant)

    def __str__(self):
        return self.name


class Bystander(models.Model):
    name = models.CharField(max_length=200)
    participant = models.ForeignKey(Participant)


class Meal(models.Model):
    price = models.IntegerField(default=0)
    date = models.DateTimeField(auto_now=True)
    completed = models.BooleanField(default=False)
    description = models.CharField(max_length=200, blank=True)
    wbw_list = models.ForeignKey(Wbw_list, null=True)
    participants = models.ManyToManyField(Participant, blank=True)
    bystanders = models.ManyToManyField(Bystander, blank=True)
    payer = models.ForeignKey(Participant, null=True, related_name='paymeal')
from django.db import models


class Wbw_list(models.Model):
    list_id = models.IntegerField(unique=True)
    name = models.CharField(max_length=200, blank=True)

    def __str__(self):
        return self.name


class Participant(models.Model):
    wbw_list = models.ManyToManyField(Wbw_list, through='Participation')
    wbw_id = models.IntegerField(unique=True)


class Participation(models.Model):
    name = models.CharField(max_length=200)
    wbw_list = models.ForeignKey(Wbw_list)
    participant = models.ForeignKey(Participant)

    def __str__(self):
        return self.name


class Bystander(models.Model):
    name = models.CharField(max_length=200)
    participant = models.ForeignKey(Participant)


class Meal(models.Model):
    price = models.IntegerField(default=0)
    date = models.DateTimeField(auto_now=True)
    completed = models.BooleanField(default=False)
    description = models.CharField(max_length=200, blank=True)
    wbw_list = models.ForeignKey(Wbw_list, null=True)
    participants = models.ManyToManyField(Participant, blank=True)
    bystanders = models.ManyToManyField(Bystander, blank=True)
    payer = models.ForeignKey(Participant, null=True, blank=True, related_name='paymeal')
Allow meals with unknown payer
Allow meals with unknown payer
Python
cc0-1.0
joostrijneveld/eetvoudig,joostrijneveld/eetvoudig,joostrijneveld/eetvoudig
python
## Code Before: from django.db import models class Wbw_list(models.Model): list_id = models.IntegerField(unique=True) name = models.CharField(max_length=200, blank=True) def __str__(self): return self.name class Participant(models.Model): wbw_list = models.ManyToManyField(Wbw_list, through='Participation') wbw_id = models.IntegerField(unique=True) class Participation(models.Model): name = models.CharField(max_length=200) wbw_list = models.ForeignKey(Wbw_list) participant = models.ForeignKey(Participant) def __str__(self): return self.name class Bystander(models.Model): name = models.CharField(max_length=200) participant = models.ForeignKey(Participant) class Meal(models.Model): price = models.IntegerField(default=0) date = models.DateTimeField(auto_now=True) completed = models.BooleanField(default=False) description = models.CharField(max_length=200, blank=True) wbw_list = models.ForeignKey(Wbw_list, null=True) participants = models.ManyToManyField(Participant, blank=True) bystanders = models.ManyToManyField(Bystander, blank=True) payer = models.ForeignKey(Participant, null=True, related_name='paymeal') ## Instruction: Allow meals with unknown payer ## Code After: from django.db import models class Wbw_list(models.Model): list_id = models.IntegerField(unique=True) name = models.CharField(max_length=200, blank=True) def __str__(self): return self.name class Participant(models.Model): wbw_list = models.ManyToManyField(Wbw_list, through='Participation') wbw_id = models.IntegerField(unique=True) class Participation(models.Model): name = models.CharField(max_length=200) wbw_list = models.ForeignKey(Wbw_list) participant = models.ForeignKey(Participant) def __str__(self): return self.name class Bystander(models.Model): name = models.CharField(max_length=200) participant = models.ForeignKey(Participant) class Meal(models.Model): price = models.IntegerField(default=0) date = models.DateTimeField(auto_now=True) completed = models.BooleanField(default=False) description = models.CharField(max_length=200, blank=True) wbw_list = models.ForeignKey(Wbw_list, null=True) participants = models.ManyToManyField(Participant, blank=True) bystanders = models.ManyToManyField(Bystander, blank=True) payer = models.ForeignKey(Participant, null=True, blank=True, related_name='paymeal')
... wbw_list = models.ForeignKey(Wbw_list, null=True) participants = models.ManyToManyField(Participant, blank=True) bystanders = models.ManyToManyField(Bystander, blank=True) payer = models.ForeignKey(Participant, null=True, blank=True, related_name='paymeal') ...
8b0dcf1bfda26ab9463d2c5a892b7ffd3fa015d9
packs/github/actions/lib/formatters.py
packs/github/actions/lib/formatters.py
__all__ = [
    'issue_to_dict'
]


def issue_to_dict(issue):
    result = {}

    if issue.closed_by:
        closed_by = issue.closed_by.name
    else:
        closed_by = None

    result['id'] = issue.id
    result['repository'] = issue.repository.name
    result['title'] = issue.title
    result['body'] = issue.body
    result['url'] = issue.html_url
    result['state'] = issue.state
    result['labels'] = issue.labels
    result['created_at'] = issue.created_at
    result['closed_at'] = issue.closed_at
    result['closed_by'] = closed_by

    return result
__all__ = [
    'issue_to_dict',
    'label_to_dict'
]


def issue_to_dict(issue):
    result = {}

    if issue.closed_by:
        closed_by = issue.closed_by.name
    else:
        closed_by = None

    result['id'] = issue.id
    result['repository'] = issue.repository.name
    result['title'] = issue.title
    result['body'] = issue.body
    result['url'] = issue.html_url
    result['state'] = issue.state

    if issue.labels:
        labels = [label_to_dict(label) for label in issue.labels]
    else:
        labels = []

    result['labels'] = labels
    result['created_at'] = issue.created_at
    result['closed_at'] = issue.closed_at
    result['closed_by'] = closed_by

    return result


def label_to_dict(label):
    result = {}

    result['name'] = label.name
    result['color'] = label.color
    result['url'] = label.url

    return result
Make sure we flatten the labels attribute to a serializable simple type.
Make sure we flatten the labels attribute to a serializable simple type.
Python
apache-2.0
pearsontechnology/st2contrib,pidah/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,psychopenguin/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,pidah/st2contrib,armab/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,armab/st2contrib
python
## Code Before: __all__ = [ 'issue_to_dict' ] def issue_to_dict(issue): result = {} if issue.closed_by: closed_by = issue.closed_by.name else: closed_by = None result['id'] = issue.id result['repository'] = issue.repository.name result['title'] = issue.title result['body'] = issue.body result['url'] = issue.html_url result['state'] = issue.state result['labels'] = issue.labels result['created_at'] = issue.created_at result['closed_at'] = issue.closed_at result['closed_by'] = closed_by return result ## Instruction: Make sure we flatten the labels attribute to a serializable simple type. ## Code After: __all__ = [ 'issue_to_dict', 'label_to_dict' ] def issue_to_dict(issue): result = {} if issue.closed_by: closed_by = issue.closed_by.name else: closed_by = None result['id'] = issue.id result['repository'] = issue.repository.name result['title'] = issue.title result['body'] = issue.body result['url'] = issue.html_url result['state'] = issue.state if issue.labels: labels = [label_to_dict(label) for label in issue.labels] else: labels = [] result['labels'] = labels result['created_at'] = issue.created_at result['closed_at'] = issue.closed_at result['closed_by'] = closed_by return result def label_to_dict(label): result = {} result['name'] = label.name result['color'] = label.color result['url'] = label.url return result
... __all__ = [ 'issue_to_dict', 'label_to_dict' ] ... result['body'] = issue.body result['url'] = issue.html_url result['state'] = issue.state if issue.labels: labels = [label_to_dict(label) for label in issue.labels] else: labels = [] result['labels'] = labels result['created_at'] = issue.created_at result['closed_at'] = issue.closed_at result['closed_by'] = closed_by return result def label_to_dict(label): result = {} result['name'] = label.name result['color'] = label.color result['url'] = label.url return result ...
1cff28b9612c156363ed87cdde1718ee83b65776
real_estate_agency/resale/serializers.py
real_estate_agency/resale/serializers.py
from rest_framework import serializers

from .models import ResaleApartment, ResaleApartmentImage


class ResaleApartmentImageSerializer(serializers.ModelSerializer):
    class Meta:
        model = ResaleApartmentImage
        fields = '__all__'


class ResaleApartmentSerializer(serializers.ModelSerializer):
    # images = ResaleApartmentImageSerializer(source='photos', many=True)
    get_building_type_display = serializers.ReadOnlyField()
    price_per_square_meter = serializers.ReadOnlyField()
    neighbourhood = serializers.StringRelatedField()

    class Meta:
        model = ResaleApartment
        fields = (
            'total_area',
            'address',
            'floor',
            'number_of_storeys',
            # 'images',
            'full_price',
            'old_price',
            'price_per_square_meter',
            'neighbourhood',
            'get_building_type_display',
            'number_of_storeys',
            'date_of_construction',
            'celling_height',
            'decoration',
            'kitchen_area',
            'balcony_area',
            'id',
        )
from rest_framework import serializers

from .models import ResaleApartment, ResaleApartmentImage


class ResaleApartmentImageSerializer(serializers.ModelSerializer):
    class Meta:
        model = ResaleApartmentImage
        fields = '__all__'


class ResaleApartmentSerializer(serializers.ModelSerializer):
    # images = ResaleApartmentImageSerializer(source='photos', many=True)
    get_building_type_display = serializers.ReadOnlyField()
    price_per_square_meter = serializers.ReadOnlyField()
    neighbourhood = serializers.StringRelatedField()
    decoration = serializers.ReadOnlyField(source='decoration.name')

    class Meta:
        model = ResaleApartment
        fields = (
            'total_area',
            'address',
            'floor',
            'number_of_storeys',
            # 'images',
            'full_price',
            'old_price',
            'price_per_square_meter',
            'neighbourhood',
            'get_building_type_display',
            'number_of_storeys',
            'date_of_construction',
            'celling_height',
            'decoration',
            'kitchen_area',
            'balcony_area',
            'id',
        )
Make ResaleApartmentSerializer return Decoration.name on decoration field.
Make ResaleApartmentSerializer return Decoration.name on decoration field.

It allows showing a readable value on the resale detail page.
Python
mit
Dybov/real_estate_agency,Dybov/real_estate_agency,Dybov/real_estate_agency
python
## Code Before: from rest_framework import serializers from .models import ResaleApartment, ResaleApartmentImage class ResaleApartmentImageSerializer(serializers.ModelSerializer): class Meta: model = ResaleApartmentImage fields = '__all__' class ResaleApartmentSerializer(serializers.ModelSerializer): # images = ResaleApartmentImageSerializer(source='photos', many=True) get_building_type_display = serializers.ReadOnlyField() price_per_square_meter = serializers.ReadOnlyField() neighbourhood = serializers.StringRelatedField() class Meta: model = ResaleApartment fields = ( 'total_area', 'address', 'floor', 'number_of_storeys', # 'images', 'full_price', 'old_price', 'price_per_square_meter', 'neighbourhood', 'get_building_type_display', 'number_of_storeys', 'date_of_construction', 'celling_height', 'decoration', 'kitchen_area', 'balcony_area', 'id', ) ## Instruction: Make ResaleApartmentSerializer return Decoration.name on decoration field. It allows to show readable value at resale detailed page. ## Code After: from rest_framework import serializers from .models import ResaleApartment, ResaleApartmentImage class ResaleApartmentImageSerializer(serializers.ModelSerializer): class Meta: model = ResaleApartmentImage fields = '__all__' class ResaleApartmentSerializer(serializers.ModelSerializer): # images = ResaleApartmentImageSerializer(source='photos', many=True) get_building_type_display = serializers.ReadOnlyField() price_per_square_meter = serializers.ReadOnlyField() neighbourhood = serializers.StringRelatedField() decoration = serializers.ReadOnlyField(source='decoration.name') class Meta: model = ResaleApartment fields = ( 'total_area', 'address', 'floor', 'number_of_storeys', # 'images', 'full_price', 'old_price', 'price_per_square_meter', 'neighbourhood', 'get_building_type_display', 'number_of_storeys', 'date_of_construction', 'celling_height', 'decoration', 'kitchen_area', 'balcony_area', 'id', )
... get_building_type_display = serializers.ReadOnlyField() price_per_square_meter = serializers.ReadOnlyField() neighbourhood = serializers.StringRelatedField() decoration = serializers.ReadOnlyField(source='decoration.name') class Meta: model = ResaleApartment ...
020015cccceb3c2391c4764ee2ec29dfc5c461c6
__init__.py
__init__.py
from . import LayerView


def getMetaData():
    return { "name": "LayerView", "type": "View" }


def register(app):
    app.getController().addView("LayerView", LayerView.LayerView())
from . import LayerView


def getMetaData():
    return { "name": "LayerView", "type": "View" }


def register(app):
    return LayerView.LayerView()
Update plugin's register functions to return the object instance instead of performing the registration themselves
Update plugin's register functions to return the object instance instead of performing the registration themselves
Python
agpl-3.0
Curahelper/Cura,bq/Ultimaker-Cura,ad1217/Cura,bq/Ultimaker-Cura,senttech/Cura,lo0ol/Ultimaker-Cura,quillford/Cura,derekhe/Cura,ynotstartups/Wanhao,markwal/Cura,lo0ol/Ultimaker-Cura,senttech/Cura,DeskboxBrazil/Cura,ynotstartups/Wanhao,totalretribution/Cura,ad1217/Cura,fieldOfView/Cura,quillford/Cura,fxtentacle/Cura,derekhe/Cura,hmflash/Cura,DeskboxBrazil/Cura,fieldOfView/Cura,totalretribution/Cura,Curahelper/Cura,markwal/Cura,fxtentacle/Cura,hmflash/Cura
python
## Code Before: from . import LayerView def getMetaData(): return { "name": "LayerView", "type": "View" } def register(app): app.getController().addView("LayerView", LayerView.LayerView()) ## Instruction: Update plugin's register functions to return the object instance instead of performing the registration themselves ## Code After: from . import LayerView def getMetaData(): return { "name": "LayerView", "type": "View" } def register(app): return LayerView.LayerView()
... return { "name": "LayerView", "type": "View" } def register(app): return LayerView.LayerView() ...
30994f2e2dacc4a4e73d0b2eef1251fd59ff910c
src/Game.java
src/Game.java
import java.util.List;

/**
 * Created by nate on 4/10/16.
 */
public class Game extends Model {
    public int id;
    public String name;
    public float price;

    public List<Category> categories() {
        return null;
    }

    public List<Review> reviews() {
        return null;
    }
}
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

/**
 * Created by nate on 4/10/16.
 */
public class Game extends Model {
    public int id;
    public String name;
    public float price;

    public List<Category> categories() {
        try {
            List<Category> categories = new ArrayList<Category>();
            Category cat;

            PreparedStatement s = c.prepareStatement(
                "SELECT id, name from game_category join category on id=category_id where game_id=?"
            );
            s.setInt(1, id);

            ResultSet rs = s.executeQuery();

            while (rs.next()) {
                cat = new Category();
                cat.id = rs.getInt("id");
                cat.name = rs.getString("name");

                categories.add(cat);
            }

            return categories;
        } catch (SQLException e) {
            e.printStackTrace();
            return null;
        }
    }

    public List<Review> reviews() {
        try {
            List<Review> reviews = new ArrayList<>();
            UserReview ur;
            CriticReview cr;
            User u;

            PreparedStatement s = c.prepareStatement(
                "SELECT rating, text, real_name, profile_name, credit_card, level, phone, user.id" +
                "from user_review" +
                "join user on user.id=user_review.user_id" +
                "where game_id=?"
            );
            s.setInt(1, id);

            ResultSet rs = s.executeQuery();

            while (rs.next()) {
                ur = new UserReview();
                ur.rating = rs.getInt("id");
                ur.text = rs.getString("text");
                ur.game = this;

                u = new User();
                ur.user = u;
                u.realName = rs.getString("real_name");
                u.creditCard = rs.getString("credit_card");
                u.level = rs.getInt("level");
                u.phone = rs.getString("phone");
                u.id = rs.getInt("user.id");

                reviews.add(ur);
            }

            s = c.prepareStatement("SELECT rating, text, company, link from critic_review where game_id=?");
            s.setInt(1, id);

            rs = s.executeQuery();

            while (rs.next()) {
                cr = new CriticReview();
                cr.game = this;
                cr.company = rs.getString("company");
                cr.rating = rs.getFloat("rating");
                cr.link = rs.getString("link");

                reviews.add(cr);
            }

            return reviews;
        } catch (SQLException e) {
            e.printStackTrace();
            return null;
        }
    }
}
Add query methods to game class
Add query methods to game class
Java
mit
natemara/databases-project,natemara/databases-project
java
## Code Before: import java.util.List; /** * Created by nate on 4/10/16. */ public class Game extends Model { public int id; public String name; public float price; public List<Category> categories() { return null; } public List<Review> reviews() { return null; } } ## Instruction: Add query methods to game class ## Code After: import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; /** * Created by nate on 4/10/16. */ public class Game extends Model { public int id; public String name; public float price; public List<Category> categories() { try { List<Category> categories = new ArrayList<Category>(); Category cat; PreparedStatement s = c.prepareStatement( "SELECT id, name from game_category join category on id=category_id where game_id=?" ); s.setInt(1, id); ResultSet rs = s.executeQuery(); while (rs.next()) { cat = new Category(); cat.id = rs.getInt("id"); cat.name = rs.getString("name"); categories.add(cat); } return categories; } catch (SQLException e) { e.printStackTrace(); return null; } } public List<Review> reviews() { try { List<Review> reviews = new ArrayList<>(); UserReview ur; CriticReview cr; User u; PreparedStatement s = c.prepareStatement( "SELECT rating, text, real_name, profile_name, credit_card, level, phone, user.id" + "from user_review" + "join user on user.id=user_review.user_id" + "where game_id=?" ); s.setInt(1, id); ResultSet rs = s.executeQuery(); while (rs.next()) { ur = new UserReview(); ur.rating = rs.getInt("id"); ur.text = rs.getString("text"); ur.game = this; u = new User(); ur.user = u; u.realName = rs.getString("real_name"); u.creditCard = rs.getString("credit_card"); u.level = rs.getInt("level"); u.phone = rs.getString("phone"); u.id = rs.getInt("user.id"); reviews.add(ur); } s = c.prepareStatement("SELECT rating, text, company, link from critic_review where game_id=?"); s.setInt(1, id); rs = s.executeQuery(); while (rs.next()) { cr = new CriticReview(); cr.game = this; cr.company = rs.getString("company"); cr.rating = rs.getFloat("rating"); cr.link = rs.getString("link"); reviews.add(cr); } return reviews; } catch (SQLException e) { e.printStackTrace(); return null; } } }
# ... existing code ... import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; /** # ... modified code ... public float price; public List<Category> categories() { try { List<Category> categories = new ArrayList<Category>(); Category cat; PreparedStatement s = c.prepareStatement( "SELECT id, name from game_category join category on id=category_id where game_id=?" ); s.setInt(1, id); ResultSet rs = s.executeQuery(); while (rs.next()) { cat = new Category(); cat.id = rs.getInt("id"); cat.name = rs.getString("name"); categories.add(cat); } return categories; } catch (SQLException e) { e.printStackTrace(); return null; } } public List<Review> reviews() { try { List<Review> reviews = new ArrayList<>(); UserReview ur; CriticReview cr; User u; PreparedStatement s = c.prepareStatement( "SELECT rating, text, real_name, profile_name, credit_card, level, phone, user.id" + "from user_review" + "join user on user.id=user_review.user_id" + "where game_id=?" ); s.setInt(1, id); ResultSet rs = s.executeQuery(); while (rs.next()) { ur = new UserReview(); ur.rating = rs.getInt("id"); ur.text = rs.getString("text"); ur.game = this; u = new User(); ur.user = u; u.realName = rs.getString("real_name"); u.creditCard = rs.getString("credit_card"); u.level = rs.getInt("level"); u.phone = rs.getString("phone"); u.id = rs.getInt("user.id"); reviews.add(ur); } s = c.prepareStatement("SELECT rating, text, company, link from critic_review where game_id=?"); s.setInt(1, id); rs = s.executeQuery(); while (rs.next()) { cr = new CriticReview(); cr.game = this; cr.company = rs.getString("company"); cr.rating = rs.getFloat("rating"); cr.link = rs.getString("link"); reviews.add(cr); } return reviews; } catch (SQLException e) { e.printStackTrace(); return null; } } } # ... rest of the code ...
0de68abab608680d4ef390fea572828fb12b6abd
rounding/common.py
rounding/common.py
'''
Created on Oct 6, 2013

@author: dmaust
'''
import math


class RounderBase(object):
    '''
    Abstract base class for rounding
    '''

    def __init__(self, precision=0):
        '''
        Constructor
        '''
        self.precision = precision
        self.cumulative_error = 0

    def _get_fraction(self, x):
        scale = 10.0**self.precision
        scaled_x = x * scale
        fraction = scaled_x - math.floor(scaled_x)
        return fraction, scaled_x, scale

    def _record_roundoff_error(self, x, result):
        self.cumulative_error += result - x

    @property
    def roundoff_error(self):
        return self.cumulative_error
'''
Created on Oct 6, 2013

@author: dmaust
'''
import math


class RounderBase(object):
    '''
    Abstract base class for rounding
    '''

    def __init__(self, precision=0):
        '''
        Constructor
        '''
        self.precision = precision
        self.cumulative_error = 0
        self.count = 0

    def _get_fraction(self, x):
        scale = 10.0**self.precision
        scaled_x = x * scale
        fraction = scaled_x - math.floor(scaled_x)
        return fraction, scaled_x, scale

    def _record_roundoff_error(self, x, result):
        self.cumulative_error += result - x
        self.count += 1

    @property
    def roundoff_error(self):
        return self.cumulative_error

    @property
    def average_roundoff(self):
        return self.cumulative_error / self.count
Add average_roundoff to rounding base class.
Add average_roundoff to rounding base class.
Python
apache-2.0
dmaust/rounding
python
## Code Before: ''' Created on Oct 6, 2013 @author: dmaust ''' import math class RounderBase(object): ''' Abstract base class for rounding ''' def __init__(self, precision=0): ''' Constructor ''' self.precision = precision self.cumulative_error = 0 def _get_fraction(self, x): scale = 10.0**self.precision scaled_x = x * scale fraction = scaled_x - math.floor(scaled_x) return fraction, scaled_x, scale def _record_roundoff_error(self, x, result): self.cumulative_error += result - x @property def roundoff_error(self): return self.cumulative_error ## Instruction: Add average_roundoff to rounding base class. ## Code After: ''' Created on Oct 6, 2013 @author: dmaust ''' import math class RounderBase(object): ''' Abstract base class for rounding ''' def __init__(self, precision=0): ''' Constructor ''' self.precision = precision self.cumulative_error = 0 self.count = 0 def _get_fraction(self, x): scale = 10.0**self.precision scaled_x = x * scale fraction = scaled_x - math.floor(scaled_x) return fraction, scaled_x, scale def _record_roundoff_error(self, x, result): self.cumulative_error += result - x self.count += 1 @property def roundoff_error(self): return self.cumulative_error @property def average_roundoff(self): return self.cumulative_error / self.count
# ... existing code ... ''' self.precision = precision self.cumulative_error = 0 self.count = 0 def _get_fraction(self, x): # ... modified code ... def _record_roundoff_error(self, x, result): self.cumulative_error += result - x self.count += 1 @property def roundoff_error(self): return self.cumulative_error @property def average_roundoff(self): return self.cumulative_error / self.count # ... rest of the code ...
e29f250286411c0e1c6f084f9e3f1ab4cbdfa6ec
setup.py
setup.py
from auto_version import calculate_version, build_py_copy_version


def configuration(parent_package='', top_path=None):
    import numpy
    from distutils.errors import DistutilsError
    if numpy.__dict__.get('quaternion') is not None:
        raise DistutilsError('The target NumPy already has a quaternion type')
    from numpy.distutils.misc_util import Configuration
    # if(os.environ.get('THIS_IS_TRAVIS') is not None):
    #     print("This appears to be Travis!")
    #     compile_args = ['-O3']
    # else:
    #     compile_args = ['-ffast-math', '-O3']
    compile_args = ['-O3']
    config = Configuration('quaternion', parent_package, top_path)
    config.add_extension('numpy_quaternion',
                         ['quaternion.c', 'numpy_quaternion.c'],
                         extra_compile_args=compile_args, )
    return config


if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(configuration=configuration,
          version=calculate_version(),
          cmdclass={'build_py': build_py_copy_version},)
from auto_version import calculate_version, build_py_copy_version


def configuration(parent_package='', top_path=None):
    import numpy
    from distutils.errors import DistutilsError
    if numpy.__dict__.get('quaternion') is not None:
        raise DistutilsError('The target NumPy already has a quaternion type')
    from numpy.distutils.misc_util import Configuration
    # if(os.environ.get('THIS_IS_TRAVIS') is not None):
    #     print("This appears to be Travis!")
    #     compile_args = ['-O3']
    # else:
    #     compile_args = ['-ffast-math', '-O3']
    compile_args = ['-O3']
    config = Configuration('quaternion', parent_package, top_path)
    config.add_extension('numpy_quaternion',
                         ['quaternion.c', 'numpy_quaternion.c'],
                         depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
                         extra_compile_args=compile_args, )
    return config


if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(configuration=configuration,
          version=calculate_version(),
          cmdclass={'build_py': build_py_copy_version},)
Make sure the code is rebuilt if quaternion.h changes
Make sure the code is rebuilt if quaternion.h changes
Python
mit
moble/quaternion,moble/quaternion
python
## Code Before: from auto_version import calculate_version, build_py_copy_version def configuration(parent_package='', top_path=None): import numpy from distutils.errors import DistutilsError if numpy.__dict__.get('quaternion') is not None: raise DistutilsError('The target NumPy already has a quaternion type') from numpy.distutils.misc_util import Configuration # if(os.environ.get('THIS_IS_TRAVIS') is not None): # print("This appears to be Travis!") # compile_args = ['-O3'] # else: # compile_args = ['-ffast-math', '-O3'] compile_args = ['-O3'] config = Configuration('quaternion', parent_package, top_path) config.add_extension('numpy_quaternion', ['quaternion.c', 'numpy_quaternion.c'], extra_compile_args=compile_args, ) return config if __name__ == "__main__": from numpy.distutils.core import setup setup(configuration=configuration, version=calculate_version(), cmdclass={'build_py': build_py_copy_version},) ## Instruction: Make sure the code is rebuilt if quaternion.h changes ## Code After: from auto_version import calculate_version, build_py_copy_version def configuration(parent_package='', top_path=None): import numpy from distutils.errors import DistutilsError if numpy.__dict__.get('quaternion') is not None: raise DistutilsError('The target NumPy already has a quaternion type') from numpy.distutils.misc_util import Configuration # if(os.environ.get('THIS_IS_TRAVIS') is not None): # print("This appears to be Travis!") # compile_args = ['-O3'] # else: # compile_args = ['-ffast-math', '-O3'] compile_args = ['-O3'] config = Configuration('quaternion', parent_package, top_path) config.add_extension('numpy_quaternion', ['quaternion.c', 'numpy_quaternion.c'], depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'], extra_compile_args=compile_args, ) return config if __name__ == "__main__": from numpy.distutils.core import setup setup(configuration=configuration, version=calculate_version(), cmdclass={'build_py': build_py_copy_version},)
# ... existing code ... config = Configuration('quaternion', parent_package, top_path) config.add_extension('numpy_quaternion', ['quaternion.c', 'numpy_quaternion.c'], depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'], extra_compile_args=compile_args, ) return config # ... rest of the code ...
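For context on why the one-line change works: distutils-style builds compare the built extension's timestamp against the listed sources plus anything in depends, so headers named there trigger recompilation. A stripped-down sketch of the same idea, with hypothetical package and file names:

from numpy.distutils.misc_util import Configuration

def configuration(parent_package='', top_path=None):
    config = Configuration('mypkg', parent_package, top_path)
    config.add_extension(
        'fastmath',
        sources=['fastmath.c'],
        depends=['fastmath.c', 'fastmath.h'],  # editing fastmath.h now forces a rebuild
    )
    return config

Without the depends entry, touching only the header would leave a stale compiled extension in place.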
23d4b9e6bd6b60ffd76adc2deeefc7e2d47ad94c
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/host/CommitNetworkChangesCommand.java
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/host/CommitNetworkChangesCommand.java
package org.ovirt.engine.core.bll.network.host; import org.ovirt.engine.core.bll.Backend; import org.ovirt.engine.core.bll.VdsCommand; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.VdsActionParameters; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.common.vdscommands.VDSReturnValue; import org.ovirt.engine.core.common.vdscommands.VdsIdVDSCommandParametersBase; @SuppressWarnings("serial") public class CommitNetworkChangesCommand<T extends VdsActionParameters> extends VdsCommand<T> { public CommitNetworkChangesCommand(T param) { super(param); } @Override protected void executeCommand() { VDSReturnValue retVal = Backend .getInstance() .getResourceManager() .RunVdsCommand(VDSCommandType.SetSafeNetworkConfig, new VdsIdVDSCommandParametersBase(getParameters().getVdsId())); getVds().setNetConfigDirty(false); getDbFacade().getVdsDynamicDao().update(getVds().getDynamicData()); setSucceeded(retVal.getSucceeded()); } @Override protected boolean canDoAction() { return true; } @Override public AuditLogType getAuditLogTypeValue() { return getSucceeded() ? AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES : AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES_FAILED; } }
package org.ovirt.engine.core.bll.network.host; import org.ovirt.engine.core.bll.VdsCommand; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.VdsActionParameters; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.common.vdscommands.VDSReturnValue; import org.ovirt.engine.core.common.vdscommands.VdsIdVDSCommandParametersBase; @SuppressWarnings("serial") public class CommitNetworkChangesCommand<T extends VdsActionParameters> extends VdsCommand<T> { public CommitNetworkChangesCommand(T param) { super(param); } @Override protected void executeCommand() { VDSReturnValue retVal = runVdsCommand(VDSCommandType.SetSafeNetworkConfig, new VdsIdVDSCommandParametersBase(getParameters().getVdsId())); getDbFacade().getVdsDynamicDao().updateNetConfigDirty(getParameters().getVdsId(), false); setSucceeded(retVal.getSucceeded()); } @Override protected boolean canDoAction() { return true; } @Override public AuditLogType getAuditLogTypeValue() { return getSucceeded() ? AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES : AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES_FAILED; } }
Use proper method to mark net changes as saved
engine: Use proper method to mark net changes as saved The patch reduces the need for updating the entire VdsDynamic data by using a designated method for saving the dirty-network indicator of the host. Change-Id: Ib4c54214a275a1653c46bca7c95ff847975dd7d4 Signed-off-by: Moti Asayag <[email protected]>
Java
apache-2.0
OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,halober/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,halober/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,yapengsong/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,walteryang47/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,halober/ovirt-engine,OpenUniversity/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,halober/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine
java
## Code Before: package org.ovirt.engine.core.bll.network.host; import org.ovirt.engine.core.bll.Backend; import org.ovirt.engine.core.bll.VdsCommand; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.VdsActionParameters; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.common.vdscommands.VDSReturnValue; import org.ovirt.engine.core.common.vdscommands.VdsIdVDSCommandParametersBase; @SuppressWarnings("serial") public class CommitNetworkChangesCommand<T extends VdsActionParameters> extends VdsCommand<T> { public CommitNetworkChangesCommand(T param) { super(param); } @Override protected void executeCommand() { VDSReturnValue retVal = Backend .getInstance() .getResourceManager() .RunVdsCommand(VDSCommandType.SetSafeNetworkConfig, new VdsIdVDSCommandParametersBase(getParameters().getVdsId())); getVds().setNetConfigDirty(false); getDbFacade().getVdsDynamicDao().update(getVds().getDynamicData()); setSucceeded(retVal.getSucceeded()); } @Override protected boolean canDoAction() { return true; } @Override public AuditLogType getAuditLogTypeValue() { return getSucceeded() ? AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES : AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES_FAILED; } } ## Instruction: engine: Use proper method to mark net changes as saved The patch reduces the need for updating the entire VdsDynamic data by using a designated method for saving the dirty-network indicator of the host. Change-Id: Ib4c54214a275a1653c46bca7c95ff847975dd7d4 Signed-off-by: Moti Asayag <[email protected]> ## Code After: package org.ovirt.engine.core.bll.network.host; import org.ovirt.engine.core.bll.VdsCommand; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.VdsActionParameters; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.common.vdscommands.VDSReturnValue; import org.ovirt.engine.core.common.vdscommands.VdsIdVDSCommandParametersBase; @SuppressWarnings("serial") public class CommitNetworkChangesCommand<T extends VdsActionParameters> extends VdsCommand<T> { public CommitNetworkChangesCommand(T param) { super(param); } @Override protected void executeCommand() { VDSReturnValue retVal = runVdsCommand(VDSCommandType.SetSafeNetworkConfig, new VdsIdVDSCommandParametersBase(getParameters().getVdsId())); getDbFacade().getVdsDynamicDao().updateNetConfigDirty(getParameters().getVdsId(), false); setSucceeded(retVal.getSucceeded()); } @Override protected boolean canDoAction() { return true; } @Override public AuditLogType getAuditLogTypeValue() { return getSucceeded() ? AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES : AuditLogType.NETWORK_COMMINT_NETWORK_CHANGES_FAILED; } }
# ... existing code ... package org.ovirt.engine.core.bll.network.host; import org.ovirt.engine.core.bll.VdsCommand; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.VdsActionParameters; # ... modified code ... @Override protected void executeCommand() { VDSReturnValue retVal = runVdsCommand(VDSCommandType.SetSafeNetworkConfig, new VdsIdVDSCommandParametersBase(getParameters().getVdsId())); getDbFacade().getVdsDynamicDao().updateNetConfigDirty(getParameters().getVdsId(), false); setSucceeded(retVal.getSucceeded()); } # ... rest of the code ...
5fb365333711f7e999f71d53061ae14c386e575c
src/waldur_core/core/api_groups_mapping.py
src/waldur_core/core/api_groups_mapping.py
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], }
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
Add accounting group to apidocs
Add accounting group to apidocs
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
python
## Code Before: API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], } ## Instruction: Add accounting group to apidocs ## Code After: API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
# ... existing code ... '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], } # ... rest of the code ...
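A mapping like API_GROUPS is typically consumed by prefix-matching request paths into documentation groups. The helper below is a hypothetical illustration of that lookup, not code from the project:

def group_for_path(path, api_groups):
    for group, prefixes in api_groups.items():
        if any(path.startswith(prefix) for prefix in prefixes):
            return group
    return None

API_GROUPS = {'accounting': ['/api/invoices/', '/api/invoice-items/']}
print(group_for_path('/api/invoices/42/', API_GROUPS))   # -> 'accounting'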
9f9357bc46f813cd8a26a5f14bba5364aa4a4c10
rx/core/operators/contains.py
rx/core/operators/contains.py
from typing import Callable, Optional, TypeVar from rx import operators as ops from rx.core import Observable, pipe, typing from rx.internal.basic import default_comparer _T = TypeVar("_T") def contains_( value: _T, comparer: Optional[typing.Comparer[_T]] = None ) -> Callable[[Observable[_T]], Observable[bool]]: comparer_ = comparer or default_comparer filtering = ops.filter(lambda v: comparer_(v, value)) something = ops.some() return pipe(filtering, something) __all__ = ["contains_"]
from typing import Callable, Optional, TypeVar from rx import operators as ops from rx.core import Observable, pipe, typing from rx.internal.basic import default_comparer _T = TypeVar("_T") def contains_( value: _T, comparer: Optional[typing.Comparer[_T]] = None ) -> Callable[[Observable[_T]], Observable[bool]]: comparer_ = comparer or default_comparer def predicate(v: _T) -> bool: return comparer_(v, value) filtering = ops.filter(predicate) something = ops.some() return pipe(filtering, something) __all__ = ["contains_"]
Use typed function instead of lambda
Use typed function instead of lambda
Python
mit
ReactiveX/RxPY,ReactiveX/RxPY
python
## Code Before: from typing import Callable, Optional, TypeVar from rx import operators as ops from rx.core import Observable, pipe, typing from rx.internal.basic import default_comparer _T = TypeVar("_T") def contains_( value: _T, comparer: Optional[typing.Comparer[_T]] = None ) -> Callable[[Observable[_T]], Observable[bool]]: comparer_ = comparer or default_comparer filtering = ops.filter(lambda v: comparer_(v, value)) something = ops.some() return pipe(filtering, something) __all__ = ["contains_"] ## Instruction: Use typed function instead of lambda ## Code After: from typing import Callable, Optional, TypeVar from rx import operators as ops from rx.core import Observable, pipe, typing from rx.internal.basic import default_comparer _T = TypeVar("_T") def contains_( value: _T, comparer: Optional[typing.Comparer[_T]] = None ) -> Callable[[Observable[_T]], Observable[bool]]: comparer_ = comparer or default_comparer def predicate(v: _T) -> bool: return comparer_(v, value) filtering = ops.filter(predicate) something = ops.some() return pipe(filtering, something) __all__ = ["contains_"]
// ... existing code ... ) -> Callable[[Observable[_T]], Observable[bool]]: comparer_ = comparer or default_comparer def predicate(v: _T) -> bool: return comparer_(v, value) filtering = ops.filter(predicate) something = ops.some() return pipe(filtering, something) // ... rest of the code ...
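The motivation for swapping the lambda for a nested function is that a lambda cannot carry parameter annotations, while a named def can, so type checkers see a fully typed callable. The same refactor shown in isolation, with hypothetical names and no RxPY dependency:

from typing import Callable, TypeVar

T = TypeVar("T")

def make_equals(value: T) -> Callable[[T], bool]:
    def predicate(v: T) -> bool:        # annotated, unlike `lambda v: v == value`
        return v == value
    return predicate

is_three = make_equals(3)
print(is_three(3), is_three(4))   # True False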
4e2fd123b77572bdf74938d08f3e84ccfa15af36
pycargr/cli.py
pycargr/cli.py
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
Support both json and stdout the same
Support both json and stdout the same
Python
mit
Florents-Tselai/PyCarGr
python
## Code Before: import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main() ## Instruction: Support both json and stdout the same ## Code After: import csv from argparse import ArgumentParser from json import dumps from pycargr.model import to_dict from pycargr.parser import parse_car_page parser = ArgumentParser() parser.add_argument('car_ids', nargs='+') parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout') def main(): args = parser.parse_args() car_ids = args.car_ids output = args.output results = [] for cid in car_ids: results.append(to_dict(parse_car_page(cid))) if output == 'csv': with open('data.csv', 'w') as f: writer = csv.DictWriter(f, fieldnames=results[0].keys()) writer.writeheader() for d in results: # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': main()
... # images is a list - not suitable for csv d.pop('images') writer.writerow(d) elif output == 'json' or output == 'stdout': print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False)) if __name__ == '__main__': ...
a94a8f0e5c773995da710bb8e90839c7b697db96
cobe/tokenizer.py
cobe/tokenizer.py
import re class MegaHALTokenizer: def split(self, phrase): if len(phrase) == 0: return [] # add ending punctuation if it is missing if phrase[-1] not in ".!?": phrase = phrase + "." # megahal traditionally considers [a-z0-9] as word characters. # Let's see what happens if we add [_'] words = re.findall("([\w']+|[^\w']+)", phrase.upper()) return words
import re class MegaHALTokenizer: def split(self, phrase): if len(phrase) == 0: return [] # add ending punctuation if it is missing if phrase[-1] not in ".!?": phrase = phrase + "." # megahal traditionally considers [a-z0-9] as word characters. # Let's see what happens if we add [_'] words = re.findall("([\w']+|[^\w']+)", phrase.upper(), re.UNICODE) return words
Use the re.UNICODE flag (i.e., Python character tables) in findall()
Use the re.UNICODE flag (i.e., Python character tables) in findall()
Python
mit
LeMagnesium/cobe,meska/cobe,wodim/cobe-ng,wodim/cobe-ng,LeMagnesium/cobe,pteichman/cobe,tiagochiavericosta/cobe,pteichman/cobe,meska/cobe,DarkMio/cobe,tiagochiavericosta/cobe,DarkMio/cobe
python
## Code Before: import re class MegaHALTokenizer: def split(self, phrase): if len(phrase) == 0: return [] # add ending punctuation if it is missing if phrase[-1] not in ".!?": phrase = phrase + "." # megahal traditionally considers [a-z0-9] as word characters. # Let's see what happens if we add [_'] words = re.findall("([\w']+|[^\w']+)", phrase.upper()) return words ## Instruction: Use the re.UNICODE flag (i.e., Python character tables) in findall() ## Code After: import re class MegaHALTokenizer: def split(self, phrase): if len(phrase) == 0: return [] # add ending punctuation if it is missing if phrase[-1] not in ".!?": phrase = phrase + "." # megahal traditionally considers [a-z0-9] as word characters. # Let's see what happens if we add [_'] words = re.findall("([\w']+|[^\w']+)", phrase.upper(), re.UNICODE) return words
# ... existing code ... # megahal traditionally considers [a-z0-9] as word characters. # Let's see what happens if we add [_'] words = re.findall("([\w']+|[^\w']+)", phrase.upper(), re.UNICODE) return words # ... rest of the code ...
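The flag mattered because this tokenizer ran under Python 2, where \w in a byte-string pattern defaults to ASCII matching unless re.UNICODE is passed. On Python 3 the Unicode behaviour is already the default for str patterns, so the snippet below shows the difference by opting back out with re.ASCII; the sample phrase and the simplified pattern are illustrative only:

import re

phrase = "CAFÉ 42."
print(re.findall(r"[\w']+", phrase))             # ['CAFÉ', '42'] – Unicode word chars
print(re.findall(r"[\w']+", phrase, re.ASCII))   # ['CAF', '42'] – é no longer matches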
52c359c1348b9c21f7c47917d024d7c161652b43
webapp/thing_test.py
webapp/thing_test.py
from thing import PiThing # Instantiate a PiThing pi_thing = PiThing() # Get the current switch state switch = pi_thing.read_switch() print('Switch: {0}'.format(switch))
from thing import PiThing # Instantiate a PiThing pi_thing = PiThing() # Get the current switch state switch = pi_thing.read_switch() print('Switch: {0}'.format(switch)) # Blink the LED forever. print('Blinking LED (Ctrl-C to stop)...') while True: pi_thing.set_led(True) time.sleep(0.5) pi_thing.set_led(False) time.sleep(0.5)
Add blink LED. TODO: Test on raspberry pi hardware.
Add blink LED. TODO: Test on raspberry pi hardware.
Python
mit
beepscore/pi_thing,beepscore/pi_thing,beepscore/pi_thing
python
## Code Before: from thing import PiThing # Instantiate a PiThing pi_thing = PiThing() # Get the current switch state switch = pi_thing.read_switch() print('Switch: {0}'.format(switch)) ## Instruction: Add blink LED. TODO: Test on raspberry pi hardware. ## Code After: from thing import PiThing # Instantiate a PiThing pi_thing = PiThing() # Get the current switch state switch = pi_thing.read_switch() print('Switch: {0}'.format(switch)) # Blink the LED forever. print('Blinking LED (Ctrl-C to stop)...') while True: pi_thing.set_led(True) time.sleep(0.5) pi_thing.set_led(False) time.sleep(0.5)
... # Get the current switch state switch = pi_thing.read_switch() print('Switch: {0}'.format(switch)) # Blink the LED forever. print('Blinking LED (Ctrl-C to stop)...') while True: pi_thing.set_led(True) time.sleep(0.5) pi_thing.set_led(False) time.sleep(0.5) ...
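As shown, the new loop calls time.sleep() but the snippet carries no import time line, and the commit message notes it has not yet run on hardware. A standalone version of the same loop, with the import added and the hardware class stubbed out so it can execute anywhere; the stub is hypothetical:

import time

class PiThing(object):            # stand-in for the GPIO-backed class
    def set_led(self, state):
        print('LED', 'on' if state else 'off')

pi_thing = PiThing()
for _ in range(3):                # bounded, unlike the infinite loop above
    pi_thing.set_led(True)
    time.sleep(0.5)
    pi_thing.set_led(False)
    time.sleep(0.5)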
9d78bc8bbe8d0065debd8b4e5e72ed73f135ed63
linter.py
linter.py
"""This module exports the Bashate plugin class.""" from SublimeLinter.lint import Linter import os class Bashate(Linter): """Provides an interface to bashate.""" cmd = 'bashate' comment_re = r'\s*#' regex = ( r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)' ) defaults = { 'selector': 'source.shell.bash', '--ignore=,': '', '--warn=,': '', '--error=,': '' } tempfile_suffix = 'sh' check_version = False def tmpfile(self, cmd, code, suffix=''): """ Run an external executable using a temp file to pass code and return its output. We override this to have the tmpfile extension match what is being linted so E005 is valid. """ filename, extension = os.path.splitext(self.filename) extension = '.missingextension' if not extension else extension return super().tmpfile(cmd, code, extension)
"""This module exports the Bashate plugin class.""" from SublimeLinter.lint import Linter import os class Bashate(Linter): """Provides an interface to bashate.""" cmd = 'bashate' regex = ( r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)' ) defaults = { 'selector': 'source.shell.bash', '--ignore=,': '', '--warn=,': '', '--error=,': '' } tempfile_suffix = 'sh' def tmpfile(self, cmd, code, suffix=''): """ Run an external executable using a temp file to pass code and return its output. We override this to have the tmpfile extension match what is being linted so E005 is valid. """ filename, extension = os.path.splitext(self.filename) extension = '.missingextension' if not extension else extension return super().tmpfile(cmd, code, extension)
Remove deprecated attributes comment_re and check_version
Remove deprecated attributes comment_re and check_version
Python
mit
maristgeek/SublimeLinter-contrib-bashate
python
## Code Before: """This module exports the Bashate plugin class.""" from SublimeLinter.lint import Linter import os class Bashate(Linter): """Provides an interface to bashate.""" cmd = 'bashate' comment_re = r'\s*#' regex = ( r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)' ) defaults = { 'selector': 'source.shell.bash', '--ignore=,': '', '--warn=,': '', '--error=,': '' } tempfile_suffix = 'sh' check_version = False def tmpfile(self, cmd, code, suffix=''): """ Run an external executable using a temp file to pass code and return its output. We override this to have the tmpfile extension match what is being linted so E005 is valid. """ filename, extension = os.path.splitext(self.filename) extension = '.missingextension' if not extension else extension return super().tmpfile(cmd, code, extension) ## Instruction: Remove deprecated attributes comment_re and check_version ## Code After: """This module exports the Bashate plugin class.""" from SublimeLinter.lint import Linter import os class Bashate(Linter): """Provides an interface to bashate.""" cmd = 'bashate' regex = ( r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)' ) defaults = { 'selector': 'source.shell.bash', '--ignore=,': '', '--warn=,': '', '--error=,': '' } tempfile_suffix = 'sh' def tmpfile(self, cmd, code, suffix=''): """ Run an external executable using a temp file to pass code and return its output. We override this to have the tmpfile extension match what is being linted so E005 is valid. """ filename, extension = os.path.splitext(self.filename) extension = '.missingextension' if not extension else extension return super().tmpfile(cmd, code, extension)
# ... existing code ... """Provides an interface to bashate.""" cmd = 'bashate' regex = ( r'^.+:(?P<line>\d+):1: (?:(?P<error>E)|(?P<warning>W))\d{3} (?P<message>.+)' ) # ... modified code ... '--error=,': '' } tempfile_suffix = 'sh' def tmpfile(self, cmd, code, suffix=''): """ # ... rest of the code ...
fac8f1af6bd3eb46fe2a26689b0d85f358934f7a
network_checker/url_access_checker/cli.py
network_checker/url_access_checker/cli.py
import sys from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
import os import sys # fixed in cmd2 >=0.6.6 os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
Add EDITOR variable in urlaccesschecker
Add EDITOR variable in urlaccesschecker This variable required by cmd2 library to work. Without - it fails on bootstrap with traceback: File "/usr/lib/python2.6/site-packages/cmd2.py", line 424, in Cmd if subprocess.Popen(['which', editor]) Change-Id: I061f88b65d7bc7181752cd076da4067df2f84131 Related-Bug: 1439686
Python
apache-2.0
prmtl/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-web,prmtl/fuel-web,eayunstack/fuel-web,stackforge/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-web,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-web,eayunstack/fuel-web,huntxu/fuel-web,nebril/fuel-web,prmtl/fuel-web,stackforge/fuel-web,stackforge/fuel-web,prmtl/fuel-web,nebril/fuel-web
python
## Code Before: import sys from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:])) ## Instruction: Add EDITOR variable in urlaccesschecker This variable required by cmd2 library to work. Without - it fails on bootstrap with traceback: File "/usr/lib/python2.6/site-packages/cmd2.py", line 424, in Cmd if subprocess.Popen(['which', editor]) Change-Id: I061f88b65d7bc7181752cd076da4067df2f84131 Related-Bug: 1439686 ## Code After: import os import sys # fixed in cmd2 >=0.6.6 os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager from fuel_network_checker import base_app class UrlAccessCheckApp(base_app.BaseApp): LOG_FILENAME = '/var/log/url_access_checker.log' def __init__(self): super(UrlAccessCheckApp, self).__init__( description='Url access check application', version='0.1', command_manager=CommandManager('urlaccesscheck'), ) def main(argv=sys.argv[1:]): myapp = UrlAccessCheckApp() return myapp.run(argv) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
// ... existing code ... import os import sys # fixed in cmd2 >=0.6.6 os.environ['EDITOR'] = '/usr/bin/nano' from cliff.commandmanager import CommandManager // ... rest of the code ...
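The ordering is the point of this change: cmd2 inspects EDITOR while it is being imported (the traceback quoted in the message comes from cmd2's class body), so the variable has to exist before the cliff import that pulls cmd2 in. The shape of the workaround in isolation, with the heavy import left commented out:

import os

os.environ['EDITOR'] = '/usr/bin/nano'   # must run before the import below
# from cliff.commandmanager import CommandManager   # importing cliff loads cmd2, which reads EDITOR
print(os.environ['EDITOR'])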
b1b0919f47f43d27bc409528617af8dbd4eea41c
tests/test_imports.py
tests/test_imports.py
import unittest class TestImport(unittest.TestCase): # Basic import tests for packages without any. def test_basic(self): import bq_helper import cleverhans from rl.agents.dqn import DQNAgent
import unittest class TestImport(unittest.TestCase): # Basic import tests for packages without any. def test_basic(self): import bq_helper import cleverhans
Remove import test for keras-rl
Remove import test for keras-rl This package was removed in #747
Python
apache-2.0
Kaggle/docker-python,Kaggle/docker-python
python
## Code Before: import unittest class TestImport(unittest.TestCase): # Basic import tests for packages without any. def test_basic(self): import bq_helper import cleverhans from rl.agents.dqn import DQNAgent ## Instruction: Remove import test for keras-rl This package was removed in #747 ## Code After: import unittest class TestImport(unittest.TestCase): # Basic import tests for packages without any. def test_basic(self): import bq_helper import cleverhans
... def test_basic(self): import bq_helper import cleverhans ...
d78ff232acee51f95af3e815e72d3db32cb90533
apps/privatemsg/management/commands/cleanupprivatemsg.py
apps/privatemsg/management/commands/cleanupprivatemsg.py
from django.core.management.base import NoArgsCommand from apps.privatemsg.models import PrivateMessage class Command(NoArgsCommand): """ A management command which deletes deleted private messages from the database. Calls ``PrivateMessage.objects.delete_deleted_msg()``, which contains the actual logic for determining which messages are deleted. """ help = "Delete deleted private messages from the database" def handle_noargs(self, **options): """ Command handler. :param options: Not used. :return: None. """ PrivateMessage.objects.delete_deleted_msg()
from django.core.management.base import NoArgsCommand from ...models import PrivateMessage class Command(NoArgsCommand): """ A management command which deletes deleted private messages from the database. Calls ``PrivateMessage.objects.delete_deleted_msg()``, which contains the actual logic for determining which messages are deleted. """ help = "Delete deleted private messages from the database" def handle_noargs(self, **options): """ Command handler. :param options: Not used. :return: None. """ PrivateMessage.objects.delete_deleted_msg()
Use relative import and update docstring
Use relative import and update docstring
Python
agpl-3.0
TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker,TamiaLab/carnetdumaker
python
## Code Before: from django.core.management.base import NoArgsCommand from apps.privatemsg.models import PrivateMessage class Command(NoArgsCommand): """ A management command which deletes deleted private messages from the database. Calls ``PrivateMessage.objects.delete_deleted_msg()``, which contains the actual logic for determining which messages are deleted. """ help = "Delete deleted private messages from the database" def handle_noargs(self, **options): """ Command handler. :param options: Not used. :return: None. """ PrivateMessage.objects.delete_deleted_msg() ## Instruction: Use relative import and update docstring ## Code After: from django.core.management.base import NoArgsCommand from ...models import PrivateMessage class Command(NoArgsCommand): """ A management command which deletes deleted private messages from the database. Calls ``PrivateMessage.objects.delete_deleted_msg()``, which contains the actual logic for determining which messages are deleted. """ help = "Delete deleted private messages from the database" def handle_noargs(self, **options): """ Command handler. :param options: Not used. :return: None. """ PrivateMessage.objects.delete_deleted_msg()
# ... existing code ... from django.core.management.base import NoArgsCommand from ...models import PrivateMessage class Command(NoArgsCommand): # ... rest of the code ...
db9d79d0ea96b50f5707cc823d2b82653809312b
core/src/main/java/org/realityforge/arez/api2/Reaction.java
core/src/main/java/org/realityforge/arez/api2/Reaction.java
package org.realityforge.arez.api2; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class Reaction extends Observer { /** * Flag indicating whether this reaction has been scheduled. i.e. has a pending reaction. */ private boolean _scheduled; Reaction( @Nonnull final ArezContext context, @Nullable final String name ) { super( context, name ); setOnStale( this::schedule ); } final boolean isScheduled() { return _scheduled; } final void schedule() { Guards.invariant( this::isActive, () -> String.format( "Observer named '%s' is not active but an attempt has been made to schedule observer.", getName() ) ); if ( !_scheduled ) { _scheduled = true; getContext().scheduleReaction( this ); } } }
package org.realityforge.arez.api2; import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class Reaction extends Observer { /** * Flag indicating whether this reaction has been scheduled. i.e. has a pending reaction. */ private boolean _scheduled; /** * The transaction mode in which the action executes. */ @Nonnull private final TransactionMode _mode; Reaction( @Nonnull final ArezContext context, @Nullable final String name, @Nonnull final TransactionMode mode ) { super( context, name ); setOnStale( this::schedule ); _mode = Objects.requireNonNull( mode ); } /** * Return the transaction mode in which the action executes. * * @return the transaction mode in which the action executes. */ @Nonnull final TransactionMode getMode() { return _mode; } final boolean isScheduled() { return _scheduled; } final void schedule() { Guards.invariant( this::isActive, () -> String.format( "Observer named '%s' is not active but an attempt has been made to schedule observer.", getName() ) ); if ( !_scheduled ) { _scheduled = true; getContext().scheduleReaction( this ); } } }
Add the transaction mode to the reaction
Add the transaction mode to the reaction
Java
apache-2.0
realityforge/arez,realityforge/arez,realityforge/arez
java
## Code Before: package org.realityforge.arez.api2; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class Reaction extends Observer { /** * Flag indicating whether this reaction has been scheduled. i.e. has a pending reaction. */ private boolean _scheduled; Reaction( @Nonnull final ArezContext context, @Nullable final String name ) { super( context, name ); setOnStale( this::schedule ); } final boolean isScheduled() { return _scheduled; } final void schedule() { Guards.invariant( this::isActive, () -> String.format( "Observer named '%s' is not active but an attempt has been made to schedule observer.", getName() ) ); if ( !_scheduled ) { _scheduled = true; getContext().scheduleReaction( this ); } } } ## Instruction: Add the transaction mode to the reaction ## Code After: package org.realityforge.arez.api2; import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class Reaction extends Observer { /** * Flag indicating whether this reaction has been scheduled. i.e. has a pending reaction. */ private boolean _scheduled; /** * The transaction mode in which the action executes. */ @Nonnull private final TransactionMode _mode; Reaction( @Nonnull final ArezContext context, @Nullable final String name, @Nonnull final TransactionMode mode ) { super( context, name ); setOnStale( this::schedule ); _mode = Objects.requireNonNull( mode ); } /** * Return the transaction mode in which the action executes. * * @return the transaction mode in which the action executes. */ @Nonnull final TransactionMode getMode() { return _mode; } final boolean isScheduled() { return _scheduled; } final void schedule() { Guards.invariant( this::isActive, () -> String.format( "Observer named '%s' is not active but an attempt has been made to schedule observer.", getName() ) ); if ( !_scheduled ) { _scheduled = true; getContext().scheduleReaction( this ); } } }
... package org.realityforge.arez.api2; import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; ... * Flag indicating whether this reaction has been scheduled. i.e. has a pending reaction. */ private boolean _scheduled; /** * The transaction mode in which the action executes. */ @Nonnull private final TransactionMode _mode; Reaction( @Nonnull final ArezContext context, @Nullable final String name, @Nonnull final TransactionMode mode ) { super( context, name ); setOnStale( this::schedule ); _mode = Objects.requireNonNull( mode ); } /** * Return the transaction mode in which the action executes. * * @return the transaction mode in which the action executes. */ @Nonnull final TransactionMode getMode() { return _mode; } final boolean isScheduled() ...
78c13173fadbdc3d261ab3690ffb9c37d8f8a72d
bootstrap.py
bootstrap.py
from __future__ import print_function from getpass import getpass import readline import sys import annotator from annotator.model import Consumer, User if __name__ == '__main__': r = raw_input("This program will perform initial setup of the annotation \n" "store, and create the required admin accounts. Proceed? [Y/n] ") if r and r[0] in ['n', 'N']: sys.exit(1) print("\nCreating SQLite database and ElasticSearch indices... ", end="") annotator.create_app() annotator.create_all() print("done.\n") username = raw_input("Admin username [admin]: ").strip() if not username: username = 'admin' email = '' while not email: email = raw_input("Admin email: ").strip() password = '' while not password: password = getpass("Admin password: ") ckey = raw_input("Primary consumer key [annotateit]: ").strip() if not ckey: ckey = 'annotateit' with annotator.app.test_request_context(): print("\nCreating admin user... ", end="") u = User(username, email, password) annotator.db.session.add(u) annotator.db.session.commit() print("done.") print("Creating primary consumer... ", end="") c = Consumer(ckey) c.user_id = u.id annotator.db.session.add(c) annotator.db.session.commit() print("done.\n") print("Primary consumer secret: %s" % c.secret)
from __future__ import print_function from getpass import getpass import readline import sys import annotator from annotator.model import Consumer, User if __name__ == '__main__': r = raw_input("This program will perform initial setup of the annotation \n" "store, and create the required admin accounts. Proceed? [Y/n] ") if r and r[0] in ['n', 'N']: sys.exit(1) print("\nCreating SQLite database and ElasticSearch indices... ", end="") app = annotator.create_app() annotator.create_all(app) print("done.\n") username = raw_input("Admin username [admin]: ").strip() if not username: username = 'admin' email = '' while not email: email = raw_input("Admin email: ").strip() password = '' while not password: password = getpass("Admin password: ") ckey = raw_input("Primary consumer key [annotateit]: ").strip() if not ckey: ckey = 'annotateit' with app.test_request_context(): db = app.extensions['sqlalchemy'].db print("\nCreating admin user... ", end="") u = User(username, email, password) db.session.add(u) db.session.commit() print("done.") print("Creating primary consumer... ", end="") c = Consumer(ckey) c.user_id = u.id db.session.add(c) db.session.commit() print("done.\n") print("Primary consumer secret: %s" % c.secret)
Update to reflect new create_app signature
Update to reflect new create_app signature
Python
mit
openannotation/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,ningyifan/annotator-store,nobita-isc/annotator-store,nobita-isc/annotator-store,happybelly/annotator-store
python
## Code Before: from __future__ import print_function from getpass import getpass import readline import sys import annotator from annotator.model import Consumer, User if __name__ == '__main__': r = raw_input("This program will perform initial setup of the annotation \n" "store, and create the required admin accounts. Proceed? [Y/n] ") if r and r[0] in ['n', 'N']: sys.exit(1) print("\nCreating SQLite database and ElasticSearch indices... ", end="") annotator.create_app() annotator.create_all() print("done.\n") username = raw_input("Admin username [admin]: ").strip() if not username: username = 'admin' email = '' while not email: email = raw_input("Admin email: ").strip() password = '' while not password: password = getpass("Admin password: ") ckey = raw_input("Primary consumer key [annotateit]: ").strip() if not ckey: ckey = 'annotateit' with annotator.app.test_request_context(): print("\nCreating admin user... ", end="") u = User(username, email, password) annotator.db.session.add(u) annotator.db.session.commit() print("done.") print("Creating primary consumer... ", end="") c = Consumer(ckey) c.user_id = u.id annotator.db.session.add(c) annotator.db.session.commit() print("done.\n") print("Primary consumer secret: %s" % c.secret) ## Instruction: Update to reflect new create_app signature ## Code After: from __future__ import print_function from getpass import getpass import readline import sys import annotator from annotator.model import Consumer, User if __name__ == '__main__': r = raw_input("This program will perform initial setup of the annotation \n" "store, and create the required admin accounts. Proceed? [Y/n] ") if r and r[0] in ['n', 'N']: sys.exit(1) print("\nCreating SQLite database and ElasticSearch indices... ", end="") app = annotator.create_app() annotator.create_all(app) print("done.\n") username = raw_input("Admin username [admin]: ").strip() if not username: username = 'admin' email = '' while not email: email = raw_input("Admin email: ").strip() password = '' while not password: password = getpass("Admin password: ") ckey = raw_input("Primary consumer key [annotateit]: ").strip() if not ckey: ckey = 'annotateit' with app.test_request_context(): db = app.extensions['sqlalchemy'].db print("\nCreating admin user... ", end="") u = User(username, email, password) db.session.add(u) db.session.commit() print("done.") print("Creating primary consumer... ", end="") c = Consumer(ckey) c.user_id = u.id db.session.add(c) db.session.commit() print("done.\n") print("Primary consumer secret: %s" % c.secret)
# ... existing code ... print("\nCreating SQLite database and ElasticSearch indices... ", end="") app = annotator.create_app() annotator.create_all(app) print("done.\n") # ... modified code ... if not ckey: ckey = 'annotateit' with app.test_request_context(): db = app.extensions['sqlalchemy'].db print("\nCreating admin user... ", end="") u = User(username, email, password) db.session.add(u) db.session.commit() print("done.") ... c = Consumer(ckey) c.user_id = u.id db.session.add(c) db.session.commit() print("done.\n") # ... rest of the code ...
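The new bootstrap code assumes the Flask application-factory pattern: nothing is created at import time, callers build an app explicitly and reach extensions through it rather than through module globals. The app.extensions['sqlalchemy'].db lookup matches older Flask-SQLAlchemy releases, which stored a state object with a db attribute there; newer releases store the SQLAlchemy instance itself. A minimal factory sketch with hypothetical names:

from flask import Flask

def create_app(config=None):
    app = Flask(__name__)
    if config:
        app.config.update(config)
    # extensions would be attached here via ext.init_app(app)
    return app

app = create_app({'TESTING': True})
with app.test_request_context():
    print(app.name)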
2f635e890414f777fbe3ddde1aea74ab13558313
llvmlite/tests/test_dylib.py
llvmlite/tests/test_dylib.py
import unittest from . import TestCase from llvmlite import binding as llvm from llvmlite.binding import dylib import platform class TestDylib(TestCase): def setUp(self): llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() def test_bad_library(self): with self.assertRaises(Exception) as context: dylib.load_library_permanently("zzzasdkf;jasd;l") system = platform.system() if system == "Linux": self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception)) elif system == "Darwin": self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception))
from . import TestCase from llvmlite import binding as llvm from llvmlite.binding import dylib import platform from ctypes.util import find_library import unittest @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS") class TestDylib(TestCase): def setUp(self): llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() self.system = platform.system() def test_bad_library(self): with self.assertRaises(Exception) as context: dylib.load_library_permanently("zzzasdkf;jasd;l") if self.system == "Linux": self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception)) elif self.system == "Darwin": self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception)) def test_libm(self): try: if self.system == "Linux": libm = find_library("m") elif self.system == "Darwin": libm = find_library("libm") dylib.load_library_permanently(libm) except Exception: self.fail("Valid call to link library should not fail.")
Add tests to check loading library.
Add tests to check loading library.
Python
bsd-2-clause
m-labs/llvmlite,pitrou/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,markdewing/llvmlite,pitrou/llvmlite,numba/llvmlite,markdewing/llvmlite,sklam/llvmlite,sklam/llvmlite,pitrou/llvmlite,numba/llvmlite,ssarangi/llvmlite,markdewing/llvmlite,squisher/llvmlite,ssarangi/llvmlite,m-labs/llvmlite,numba/llvmlite,numba/llvmlite,squisher/llvmlite,squisher/llvmlite,sklam/llvmlite,ssarangi/llvmlite,sklam/llvmlite,squisher/llvmlite,markdewing/llvmlite,m-labs/llvmlite,pitrou/llvmlite
python
## Code Before: import unittest from . import TestCase from llvmlite import binding as llvm from llvmlite.binding import dylib import platform class TestDylib(TestCase): def setUp(self): llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() def test_bad_library(self): with self.assertRaises(Exception) as context: dylib.load_library_permanently("zzzasdkf;jasd;l") system = platform.system() if system == "Linux": self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception)) elif system == "Darwin": self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception)) ## Instruction: Add tests to check loading library. ## Code After: from . import TestCase from llvmlite import binding as llvm from llvmlite.binding import dylib import platform from ctypes.util import find_library import unittest @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS") class TestDylib(TestCase): def setUp(self): llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() self.system = platform.system() def test_bad_library(self): with self.assertRaises(Exception) as context: dylib.load_library_permanently("zzzasdkf;jasd;l") if self.system == "Linux": self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception)) elif self.system == "Darwin": self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception)) def test_libm(self): try: if self.system == "Linux": libm = find_library("m") elif self.system == "Darwin": libm = find_library("libm") dylib.load_library_permanently(libm) except Exception: self.fail("Valid call to link library should not fail.")
# ... existing code ... from . import TestCase from llvmlite import binding as llvm from llvmlite.binding import dylib import platform from ctypes.util import find_library import unittest @unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS") class TestDylib(TestCase): def setUp(self): llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() self.system = platform.system() def test_bad_library(self): with self.assertRaises(Exception) as context: dylib.load_library_permanently("zzzasdkf;jasd;l") if self.system == "Linux": self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory' in str(context.exception)) elif self.system == "Darwin": self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found' in str(context.exception)) def test_libm(self): try: if self.system == "Linux": libm = find_library("m") elif self.system == "Darwin": libm = find_library("libm") dylib.load_library_permanently(libm) except Exception: self.fail("Valid call to link library should not fail.") # ... rest of the code ...
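ctypes.util.find_library resolves a short library name to whatever the platform's linker conventions produce, or None when nothing matches, which is why the new test branches per platform before handing the result to load_library_permanently. A small illustration; output varies by system:

from ctypes.util import find_library

print(find_library('m'))                  # e.g. 'libm.so.6' on glibc Linux, a dylib on macOS
print(find_library('no-such-library'))    # None when the lookup fails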
610d9a3c58f70d8b2002403003b705dd57513d92
manage.py
manage.py
import os import sys from django.core.management import execute_from_command_line from wger.main import get_user_config_path, setup_django_environment if __name__ == "__main__": setup_django_environment( get_user_config_path('wger', 'settings.py')) #os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings") execute_from_command_line(sys.argv)
import sys from django.core.management import execute_from_command_line from wger.utils.main import ( setup_django_environment, get_user_config_path ) if __name__ == "__main__": setup_django_environment( get_user_config_path('wger', 'settings.py')) #os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings") execute_from_command_line(sys.argv)
Change imports of helper functions
Change imports of helper functions These are now in the utils app
Python
agpl-3.0
rolandgeider/wger,kjagoo/wger_stark,rolandgeider/wger,wger-project/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,petervanderdoes/wger,rolandgeider/wger,petervanderdoes/wger,DeveloperMal/wger,wger-project/wger,wger-project/wger,rolandgeider/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,petervanderdoes/wger,kjagoo/wger_stark,DeveloperMal/wger
python
## Code Before: import os import sys from django.core.management import execute_from_command_line from wger.main import get_user_config_path, setup_django_environment if __name__ == "__main__": setup_django_environment( get_user_config_path('wger', 'settings.py')) #os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings") execute_from_command_line(sys.argv) ## Instruction: Change imports of helper functions These are now in the utils app ## Code After: import sys from django.core.management import execute_from_command_line from wger.utils.main import ( setup_django_environment, get_user_config_path ) if __name__ == "__main__": setup_django_environment( get_user_config_path('wger', 'settings.py')) #os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wger.workout_manager.settings") execute_from_command_line(sys.argv)
# ... existing code ... import sys from django.core.management import execute_from_command_line from wger.utils.main import ( setup_django_environment, get_user_config_path ) if __name__ == "__main__": setup_django_environment( # ... rest of the code ...
d2ab731f2f0748eb17cf60197f260b9f627e0511
common-model/src/main/java/com/sequenceiq/common/api/type/ImageType.java
common-model/src/main/java/com/sequenceiq/common/api/type/ImageType.java
package com.sequenceiq.common.api.type; public enum ImageType { FREEIPA, DATAHUB, DATALAKE, RUNTIME, UNKNOWN }
package com.sequenceiq.common.api.type; public enum ImageType { FREEIPA, RUNTIME, UNKNOWN }
Remove the obsoleted DATAHUB/DATALAKE enum values
CB-14466: Remove the obsoleted DATAHUB/DATALAKE enum values
Java
apache-2.0
hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak,hortonworks/cloudbreak
java
## Code Before: package com.sequenceiq.common.api.type; public enum ImageType { FREEIPA, DATAHUB, DATALAKE, RUNTIME, UNKNOWN } ## Instruction: CB-14466: Remove the obsoleted DATAHUB/DATALAKE enum values ## Code After: package com.sequenceiq.common.api.type; public enum ImageType { FREEIPA, RUNTIME, UNKNOWN }
// ... existing code ... public enum ImageType { FREEIPA, RUNTIME, UNKNOWN } // ... rest of the code ...
aafa37c83c1464c16c2c6b69cc1546a537ec99a3
main/forms.py
main/forms.py
from django import forms from main.fields import RegexField class IndexForm(forms.Form): id_list = forms.CharField( widget=forms.Textarea, label='ID List', help_text='List of students IDs to query, one per line.') student_id_regex = RegexField( label='Student ID regex', help_text='Regular expression used to match the student ID in each ' 'line. If cannot match (or a student is not found in the ' 'database), then the line is left as is.', initial=r'\b\d{7,}\b', widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
from django import forms from main.fields import RegexField class IndexForm(forms.Form): id_list = forms.CharField( help_text='List of students IDs to query, one per line.', label='ID List', widget=forms.Textarea(attrs={ 'placeholder': 'Random text\n1234567\n7654321'})) student_id_regex = RegexField( label='Student ID regex', help_text='Regular expression used to match the student ID in each ' 'line. If cannot match (or a student is not found in the ' 'database), then the line is left as is.', initial=r'\b\d{7,}\b', widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
Add placeholder to ID list field
Add placeholder to ID list field
Python
mit
m4tx/usos-id-mapper,m4tx/usos-id-mapper
python
## Code Before:
from django import forms

from main.fields import RegexField


class IndexForm(forms.Form):
    id_list = forms.CharField(
        widget=forms.Textarea, label='ID List',
        help_text='List of students IDs to query, one per line.')
    student_id_regex = RegexField(
        label='Student ID regex',
        help_text='Regular expression used to match the student ID in each '
                  'line. If cannot match (or a student is not found in the '
                  'database), then the line is left as is.',
        initial=r'\b\d{7,}\b',
        widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
## Instruction:
Add placeholder to ID list field
## Code After:
from django import forms

from main.fields import RegexField


class IndexForm(forms.Form):
    id_list = forms.CharField(
        help_text='List of students IDs to query, one per line.',
        label='ID List',
        widget=forms.Textarea(attrs={
            'placeholder': 'Random text\n1234567\n7654321'}))
    student_id_regex = RegexField(
        label='Student ID regex',
        help_text='Regular expression used to match the student ID in each '
                  'line. If cannot match (or a student is not found in the '
                  'database), then the line is left as is.',
        initial=r'\b\d{7,}\b',
        widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
# ... existing code ...
class IndexForm(forms.Form):
    id_list = forms.CharField(
        help_text='List of students IDs to query, one per line.',
        label='ID List',
        widget=forms.Textarea(attrs={
            'placeholder': 'Random text\n1234567\n7654321'}))
    student_id_regex = RegexField(
        label='Student ID regex',
        help_text='Regular expression used to match the student ID in each '
# ... rest of the code ...
f96ca22026ed189f63adc723daff443ff8562ba3
src/common/const.h
src/common/const.h
typedef unsigned long long uint64;

namespace Const
{
constexpr int HASH_BASE = 31415927;
constexpr double EPS = 1e-6;
constexpr double PI = M_PI;

inline int randUInt()
{
#ifdef _WIN32
    return (rand() << 15) | rand();
#else
    return rand();
#endif
}

inline uint64 randUInt64()
{
#ifdef _WIN32
    return (((((1ll * rand() << 15) | rand()) << 15) | rand()) << 15) | rand();
#else
    return (1ll * rand() << 31) | rand();
#endif
}

inline double randDouble()
{
#ifdef _WIN32
    return 1.0 * randUInt() / (1 << 20);
#else
    return 1.0 * rand() / RAND_MAX;
#endif
}
}

#endif // CONST_H
typedef unsigned long long uint64;

namespace Const
{
constexpr int HASH_BASE = 31415927;
constexpr double EPS = 1e-6;
constexpr double PI = M_PI;

inline int randUInt()
{
#ifdef _WIN32
    return (rand() << 15) | rand();
#else
    return rand();
#endif
}

inline uint64 randUInt64()
{
#ifdef _WIN32
    return (((((1ll * rand() << 15) | rand()) << 15) | rand()) << 15) | rand();
#else
    return (1ll * rand() << 31) | rand();
#endif
}

inline double randDouble()
{
#ifdef _WIN32
    return 1.0 * randUInt() / (1 << 30);
#else
    return 1.0 * rand() / RAND_MAX;
#endif
}
} // namespace Const

#endif // CONST_H
Fix randDouble() bug on windows
Fix randDouble() bug on windows
C
mit
equation314/3DRender
c
## Code Before:
typedef unsigned long long uint64;

namespace Const
{
constexpr int HASH_BASE = 31415927;
constexpr double EPS = 1e-6;
constexpr double PI = M_PI;

inline int randUInt()
{
#ifdef _WIN32
    return (rand() << 15) | rand();
#else
    return rand();
#endif
}

inline uint64 randUInt64()
{
#ifdef _WIN32
    return (((((1ll * rand() << 15) | rand()) << 15) | rand()) << 15) | rand();
#else
    return (1ll * rand() << 31) | rand();
#endif
}

inline double randDouble()
{
#ifdef _WIN32
    return 1.0 * randUInt() / (1 << 20);
#else
    return 1.0 * rand() / RAND_MAX;
#endif
}
}

#endif // CONST_H
## Instruction:
Fix randDouble() bug on windows
## Code After:
typedef unsigned long long uint64;

namespace Const
{
constexpr int HASH_BASE = 31415927;
constexpr double EPS = 1e-6;
constexpr double PI = M_PI;

inline int randUInt()
{
#ifdef _WIN32
    return (rand() << 15) | rand();
#else
    return rand();
#endif
}

inline uint64 randUInt64()
{
#ifdef _WIN32
    return (((((1ll * rand() << 15) | rand()) << 15) | rand()) << 15) | rand();
#else
    return (1ll * rand() << 31) | rand();
#endif
}

inline double randDouble()
{
#ifdef _WIN32
    return 1.0 * randUInt() / (1 << 30);
#else
    return 1.0 * rand() / RAND_MAX;
#endif
}
} // namespace Const

#endif // CONST_H
...
inline double randDouble()
{
#ifdef _WIN32
    return 1.0 * randUInt() / (1 << 30);
#else
    return 1.0 * rand() / RAND_MAX;
#endif
}
} // namespace Const

#endif // CONST_H
...
850c5c6f133fdfd131605eb1bf1e971b33dd7416
website/addons/twofactor/tests/test_views.py
website/addons/twofactor/tests/test_views.py
from nose.tools import *

from webtest_plus import TestApp

from tests.base import OsfTestCase
from tests.factories import AuthUserFactory

from website.app import init_app
from website.addons.twofactor.tests import _valid_code

app = init_app(
    routes=True,
    set_backends=False,
    settings_module='website.settings',
)


class TestViews(OsfTestCase):
    def setUp(self):
        super(TestViews, self).setUp()
        self.user = AuthUserFactory()
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.app = TestApp(app)

    def test_confirm_code(self):
        # Send a valid code to the API endpoint for the user settings.
        res = self.app.post_json(
            '/api/v1/settings/twofactor/',
            {'code': _valid_code(self.user_settings.totp_secret)},
            auth=self.user.auth
        )

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_true(self.user_settings.is_confirmed)
        assert_equal(res.status_code, 200)
from nose.tools import *

from webtest.app import AppError
from webtest_plus import TestApp

from tests.base import OsfTestCase
from tests.factories import AuthUserFactory

from website.app import init_app
from website.addons.twofactor.tests import _valid_code

app = init_app(
    routes=True,
    set_backends=False,
    settings_module='website.settings',
)


class TestViews(OsfTestCase):
    def setUp(self):
        super(TestViews, self).setUp()
        self.user = AuthUserFactory()
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.app = TestApp(app)

    def test_confirm_code(self):
        # Send a valid code to the API endpoint for the user settings.
        res = self.app.post_json(
            '/api/v1/settings/twofactor/',
            {'code': _valid_code(self.user_settings.totp_secret)},
            auth=self.user.auth
        )

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_true(self.user_settings.is_confirmed)
        assert_equal(res.status_code, 200)

    def test_confirm_code_failure(self):
        with assert_raises(AppError) as error:
            res = self.app.post_json(
                '/api/v1/settings/twofactor/',
                {'code': '000000'},
                auth=self.user.auth
            )
        assert_in('403 FORBIDDEN', error.message)

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_false(self.user_settings.is_confirmed)
Add test for failure to confirm 2FA code
Add test for failure to confirm 2FA code
Python
apache-2.0
doublebits/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,barbour-em/osf.io,wearpants/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,dplorimer/osf,amyshi188/osf.io,SSJohns/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,mattclark/osf.io,SSJohns/osf.io,amyshi188/osf.io,caneruguz/osf.io,ckc6cz/osf.io,baylee-d/osf.io,hmoco/osf.io,Nesiehr/osf.io,revanthkolli/osf.io,Ghalko/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,mluo613/osf.io,sloria/osf.io,caseyrygt/osf.io,bdyetton/prettychart,erinspace/osf.io,binoculars/osf.io,himanshuo/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,reinaH/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,binoculars/osf.io,mluke93/osf.io,himanshuo/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,abought/osf.io,hmoco/osf.io,AndrewSallans/osf.io,caseyrollins/osf.io,zamattiac/osf.io,kwierman/osf.io,jolene-esposito/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,chrisseto/osf.io,revanthkolli/osf.io,rdhyee/osf.io,barbour-em/osf.io,abought/osf.io,acshi/osf.io,lyndsysimon/osf.io,monikagrabowska/osf.io,mluo613/osf.io,arpitar/osf.io,leb2dg/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,asanfilippo7/osf.io,wearpants/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,erinspace/osf.io,sbt9uc/osf.io,asanfilippo7/osf.io,abought/osf.io,caneruguz/osf.io,icereval/osf.io,felliott/osf.io,KAsante95/osf.io,kch8qx/osf.io,leb2dg/osf.io,samanehsan/osf.io,cldershem/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,fabianvf/osf.io,chrisseto/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,emetsger/osf.io,barbour-em/osf.io,jolene-esposito/osf.io,dplorimer/osf,mfraezz/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,billyhunt/osf.io,samchrisinger/osf.io,acshi/osf.io,fabianvf/osf.io,samanehsan/osf.io,felliott/osf.io,revanthkolli/osf.io,Nesiehr/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,MerlinZhang/osf.io,mluo613/osf.io,mfraezz/osf.io,zamattiac/osf.io,emetsger/osf.io,AndrewSallans/osf.io,alexschiller/osf.io,TomBaxter/osf.io,doublebits/osf.io,cosenal/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,zkraime/osf.io,erinspace/osf.io,felliott/osf.io,wearpants/osf.io,petermalcolm/osf.io,mluke93/osf.io,himanshuo/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,zkraime/osf.io,jinluyuan/osf.io,rdhyee/osf.io,sloria/osf.io,lamdnhan/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,fabianvf/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,mluo613/osf.io,TomHeatwole/osf.io,HarryRybacki/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,icereval/osf.io,barbour-em/osf.io,aaxelb/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,mattclark/osf.io,kch8qx/osf.io,chennan47/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,samchrisinger/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,chennan47/osf.io,jinluyuan/osf.io,acshi/osf.io,cwisecarver/osf.io,Ghalko/osf.io,GaryKriebel/osf.io,Johnetordoff/osf.io,adlius/osf.io,acshi/osf.io,DanielSBrown/osf.io,mattclark/osf.io,ticklemepierce/osf.io,GaryKriebel/osf.io,arpitar/osf.io,kwierman/osf.io,wearpants/osf.io,reinaH/osf.io,cwisecarver/osf.io,sbt9uc/osf.io,aaxelb/osf.io,caneruguz/osf.io,adlius/osf.io,amyshi188/osf.io,leb2dg/osf.io,dplorimer/osf,petermalcolm/osf.io,cldershem/osf.io,HarryRybacki/osf.io,cldershem/osf.io,cslzchen/osf.io,sbt9uc/osf.io,binoculars/osf.io,Nesiehr/osf.io,danielneis/osf.io,zkraime/osf.io,baylee-d/osf.io,jeffreyliu3230/osf.io,adlius/osf.io,Johnetordoff/osf.io,MerlinZhang/osf.io,kushG/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,nj
antrania/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,mluo613/osf.io,kushG/osf.io,sloria/osf.io,Ghalko/osf.io,ticklemepierce/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,leb2dg/osf.io,lyndsysimon/osf.io,caseyrygt/osf.io,mfraezz/osf.io,GageGaskins/osf.io,njantrania/osf.io,brandonPurvis/osf.io,kwierman/osf.io,GaryKriebel/osf.io,billyhunt/osf.io,GageGaskins/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,fabianvf/osf.io,mluke93/osf.io,hmoco/osf.io,laurenrevere/osf.io,baylee-d/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,jmcarp/osf.io,lyndsysimon/osf.io,lyndsysimon/osf.io,saradbowman/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,cldershem/osf.io,billyhunt/osf.io,emetsger/osf.io,lamdnhan/osf.io,zamattiac/osf.io,jnayak1/osf.io,danielneis/osf.io,samchrisinger/osf.io,arpitar/osf.io,mluke93/osf.io,RomanZWang/osf.io,acshi/osf.io,doublebits/osf.io,jolene-esposito/osf.io,reinaH/osf.io,zkraime/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,dplorimer/osf,petermalcolm/osf.io,cslzchen/osf.io,bdyetton/prettychart,rdhyee/osf.io,RomanZWang/osf.io,felliott/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,lamdnhan/osf.io,mfraezz/osf.io,jinluyuan/osf.io,reinaH/osf.io,samanehsan/osf.io,chennan47/osf.io,zachjanicki/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,kushG/osf.io,crcresearch/osf.io,adlius/osf.io,brianjgeiger/osf.io,lamdnhan/osf.io,jnayak1/osf.io,arpitar/osf.io,pattisdr/osf.io,GageGaskins/osf.io,alexschiller/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,revanthkolli/osf.io,cosenal/osf.io,DanielSBrown/osf.io,hmoco/osf.io,jeffreyliu3230/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,jeffreyliu3230/osf.io,bdyetton/prettychart,jnayak1/osf.io,crcresearch/osf.io,ckc6cz/osf.io,kch8qx/osf.io,pattisdr/osf.io,himanshuo/osf.io,RomanZWang/osf.io,jinluyuan/osf.io,njantrania/osf.io,brianjgeiger/osf.io,ckc6cz/osf.io,kushG/osf.io,njantrania/osf.io,danielneis/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,caseyrygt/osf.io,doublebits/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,doublebits/osf.io,Ghalko/osf.io,samanehsan/osf.io,kwierman/osf.io,caneruguz/osf.io,RomanZWang/osf.io,saradbowman/osf.io,emetsger/osf.io,sbt9uc/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,jmcarp/osf.io,kch8qx/osf.io,abought/osf.io,KAsante95/osf.io,kch8qx/osf.io,icereval/osf.io,KAsante95/osf.io,jeffreyliu3230/osf.io
python
## Code Before:
from nose.tools import *

from webtest_plus import TestApp

from tests.base import OsfTestCase
from tests.factories import AuthUserFactory

from website.app import init_app
from website.addons.twofactor.tests import _valid_code

app = init_app(
    routes=True,
    set_backends=False,
    settings_module='website.settings',
)


class TestViews(OsfTestCase):
    def setUp(self):
        super(TestViews, self).setUp()
        self.user = AuthUserFactory()
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.app = TestApp(app)

    def test_confirm_code(self):
        # Send a valid code to the API endpoint for the user settings.
        res = self.app.post_json(
            '/api/v1/settings/twofactor/',
            {'code': _valid_code(self.user_settings.totp_secret)},
            auth=self.user.auth
        )

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_true(self.user_settings.is_confirmed)
        assert_equal(res.status_code, 200)
## Instruction:
Add test for failure to confirm 2FA code
## Code After:
from nose.tools import *

from webtest.app import AppError
from webtest_plus import TestApp

from tests.base import OsfTestCase
from tests.factories import AuthUserFactory

from website.app import init_app
from website.addons.twofactor.tests import _valid_code

app = init_app(
    routes=True,
    set_backends=False,
    settings_module='website.settings',
)


class TestViews(OsfTestCase):
    def setUp(self):
        super(TestViews, self).setUp()
        self.user = AuthUserFactory()
        self.user.add_addon('twofactor')
        self.user_settings = self.user.get_addon('twofactor')
        self.app = TestApp(app)

    def test_confirm_code(self):
        # Send a valid code to the API endpoint for the user settings.
        res = self.app.post_json(
            '/api/v1/settings/twofactor/',
            {'code': _valid_code(self.user_settings.totp_secret)},
            auth=self.user.auth
        )

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_true(self.user_settings.is_confirmed)
        assert_equal(res.status_code, 200)

    def test_confirm_code_failure(self):
        with assert_raises(AppError) as error:
            res = self.app.post_json(
                '/api/v1/settings/twofactor/',
                {'code': '000000'},
                auth=self.user.auth
            )
        assert_in('403 FORBIDDEN', error.message)

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_false(self.user_settings.is_confirmed)
# ... existing code ...
from nose.tools import *

from webtest.app import AppError
from webtest_plus import TestApp

from tests.base import OsfTestCase
# ... modified code ...
        assert_true(self.user_settings.is_confirmed)
        assert_equal(res.status_code, 200)

    def test_confirm_code_failure(self):
        with assert_raises(AppError) as error:
            res = self.app.post_json(
                '/api/v1/settings/twofactor/',
                {'code': '000000'},
                auth=self.user.auth
            )
        assert_in('403 FORBIDDEN', error.message)

        # reload the user settings object from the DB
        self.user_settings.reload()

        assert_false(self.user_settings.is_confirmed)
# ... rest of the code ...
6ffa10ad56acefe3d3178ff140ebe048bb1a1df9
Code/Python/Kamaelia/Kamaelia/Apps/SocialBookmarks/Print.py
Code/Python/Kamaelia/Kamaelia/Apps/SocialBookmarks/Print.py
import sys
import os
import inspect

def __LINE__ ():
    caller = inspect.stack()[1]
    return int (caller[2])

def __FUNC__ ():
    caller = inspect.stack()[1]
    return caller[3]

def __BOTH__():
    caller = inspect.stack()[1]
    return int (caller[2]), caller[3], caller[1]

def Print(*args):
    caller = inspect.stack()[1]
    filename = str(os.path.basename(caller[1]))
    sys.stdout.write(filename+ " : "+ str(int (caller[2])) + " : ")
    for arg in args:
        try:
            x = str(arg)
        except:
            pass
        try:
            print x,
        except:
            try:
                print unicode(x, errors="ignore"),
            except:
                try:
                    sys.stdout.write(arg.encode("ascii","ignore"))
                except:
                    print "FAILED PRINT"
    print
    sys.stdout.flush()
import sys
import os
import inspect
import time

def __LINE__ ():
    caller = inspect.stack()[1]
    return int (caller[2])

def __FUNC__ ():
    caller = inspect.stack()[1]
    return caller[3]

def __BOTH__():
    caller = inspect.stack()[1]
    return int (caller[2]), caller[3], caller[1]

def Print(*args):
    caller = inspect.stack()[1]
    filename = str(os.path.basename(caller[1]))
    sys.stdout.write(filename+ " : "+ str(int (caller[2])) + " : ")
    sys.stdout.write(str(time.time()) + " : ")
    for arg in args:
        try:
            x = str(arg)
        except:
            pass
        try:
            print x,
        except:
            try:
                print unicode(x, errors="ignore"),
            except:
                try:
                    sys.stdout.write(arg.encode("ascii","ignore"))
                except:
                    print "FAILED PRINT"
    print
    sys.stdout.flush()
Add in the timestamp for each message, to enable tracking of how long problems take to resolve
Add in the timestamp for each message, to enable tracking of how long problems take to resolve
Python
apache-2.0
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
python
## Code Before:
import sys
import os
import inspect

def __LINE__ ():
    caller = inspect.stack()[1]
    return int (caller[2])

def __FUNC__ ():
    caller = inspect.stack()[1]
    return caller[3]

def __BOTH__():
    caller = inspect.stack()[1]
    return int (caller[2]), caller[3], caller[1]

def Print(*args):
    caller = inspect.stack()[1]
    filename = str(os.path.basename(caller[1]))
    sys.stdout.write(filename+ " : "+ str(int (caller[2])) + " : ")
    for arg in args:
        try:
            x = str(arg)
        except:
            pass
        try:
            print x,
        except:
            try:
                print unicode(x, errors="ignore"),
            except:
                try:
                    sys.stdout.write(arg.encode("ascii","ignore"))
                except:
                    print "FAILED PRINT"
    print
    sys.stdout.flush()
## Instruction:
Add in the timestamp for each message, to enable tracking of how long problems take to resolve
## Code After:
import sys
import os
import inspect
import time

def __LINE__ ():
    caller = inspect.stack()[1]
    return int (caller[2])

def __FUNC__ ():
    caller = inspect.stack()[1]
    return caller[3]

def __BOTH__():
    caller = inspect.stack()[1]
    return int (caller[2]), caller[3], caller[1]

def Print(*args):
    caller = inspect.stack()[1]
    filename = str(os.path.basename(caller[1]))
    sys.stdout.write(filename+ " : "+ str(int (caller[2])) + " : ")
    sys.stdout.write(str(time.time()) + " : ")
    for arg in args:
        try:
            x = str(arg)
        except:
            pass
        try:
            print x,
        except:
            try:
                print unicode(x, errors="ignore"),
            except:
                try:
                    sys.stdout.write(arg.encode("ascii","ignore"))
                except:
                    print "FAILED PRINT"
    print
    sys.stdout.flush()
# ... existing code ...
import sys
import os
import inspect
import time

def __LINE__ ():
    caller = inspect.stack()[1]
# ... modified code ...
    caller = inspect.stack()[1]
    filename = str(os.path.basename(caller[1]))
    sys.stdout.write(filename+ " : "+ str(int (caller[2])) + " : ")
    sys.stdout.write(str(time.time()) + " : ")
    for arg in args:
        try:
            x = str(arg)
# ... rest of the code ...
ec6c47796697ca26c12e2ca8269812442473dcd5
pynuts/filters.py
pynuts/filters.py
"""Jinja environment filters for Pynuts."""

from flask import escape
from flask.ext.wtf import QuerySelectField, QuerySelectMultipleField


def data(field):
    """Return data according to a specific field."""
    if isinstance(field, QuerySelectMultipleField):
        if field.data:
            return escape(
                u', '.join(field.get_label(data) for data in field.data))
    elif isinstance(field, QuerySelectField):
        if field.data:
            return escape(field.get_label(field.data))
    return escape(field.data)
"""Jinja environment filters for Pynuts."""

from flask import escape
from flask.ext.wtf import (
    QuerySelectField, QuerySelectMultipleField, BooleanField)


def data(field):
    """Return data according to a specific field."""
    if isinstance(field, QuerySelectMultipleField):
        if field.data:
            return escape(
                u', '.join(field.get_label(data) for data in field.data))
    elif isinstance(field, QuerySelectField):
        if field.data:
            return escape(field.get_label(field.data))
    elif isinstance(field, BooleanField):
        return u'✓' if field.data else u'✕'
    return escape(field.data)
Add a read filter for boolean fields
Add a read filter for boolean fields
Python
bsd-3-clause
Kozea/Pynuts,Kozea/Pynuts,Kozea/Pynuts
python
## Code Before:
"""Jinja environment filters for Pynuts."""

from flask import escape
from flask.ext.wtf import QuerySelectField, QuerySelectMultipleField


def data(field):
    """Return data according to a specific field."""
    if isinstance(field, QuerySelectMultipleField):
        if field.data:
            return escape(
                u', '.join(field.get_label(data) for data in field.data))
    elif isinstance(field, QuerySelectField):
        if field.data:
            return escape(field.get_label(field.data))
    return escape(field.data)
## Instruction:
Add a read filter for boolean fields
## Code After:
"""Jinja environment filters for Pynuts."""

from flask import escape
from flask.ext.wtf import (
    QuerySelectField, QuerySelectMultipleField, BooleanField)


def data(field):
    """Return data according to a specific field."""
    if isinstance(field, QuerySelectMultipleField):
        if field.data:
            return escape(
                u', '.join(field.get_label(data) for data in field.data))
    elif isinstance(field, QuerySelectField):
        if field.data:
            return escape(field.get_label(field.data))
    elif isinstance(field, BooleanField):
        return u'✓' if field.data else u'✕'
    return escape(field.data)
// ... existing code ...
"""Jinja environment filters for Pynuts."""

from flask import escape
from flask.ext.wtf import (
    QuerySelectField, QuerySelectMultipleField, BooleanField)


def data(field):
// ... modified code ...
    elif isinstance(field, QuerySelectField):
        if field.data:
            return escape(field.get_label(field.data))
    elif isinstance(field, BooleanField):
        return u'✓' if field.data else u'✕'
    return escape(field.data)
// ... rest of the code ...
9b4f83ec89c76d8a5b5d0502e2903e2821078271
logger.py
logger.py
import sys

import serial


def log_serial(filename, device='/dev/ttyACM0', baud=9600):
    ser = serial.Serial(device, baud)
    outfile = open(filename, 'w')
    while True:
        line = ser.readline()
        outfile.write(line)


if __name__ == '__main__':
    filename = sys.argv[1]
    log_serial(filename)
import sys

import serial


def log_serial(filename, device='/dev/ttyACM0', baud=9600):
    ser = serial.Serial(device, baud)
    outfile = open(filename, 'w')
    while True:
        line = ser.readline()
        print(line)
        outfile.write(line)


if __name__ == '__main__':
    filename = sys.argv[1]
    log_serial(filename)
Print lines that are logged
Print lines that are logged
Python
mit
wapcaplet/ardiff
python
## Code Before:
import sys

import serial


def log_serial(filename, device='/dev/ttyACM0', baud=9600):
    ser = serial.Serial(device, baud)
    outfile = open(filename, 'w')
    while True:
        line = ser.readline()
        outfile.write(line)


if __name__ == '__main__':
    filename = sys.argv[1]
    log_serial(filename)
## Instruction:
Print lines that are logged
## Code After:
import sys

import serial


def log_serial(filename, device='/dev/ttyACM0', baud=9600):
    ser = serial.Serial(device, baud)
    outfile = open(filename, 'w')
    while True:
        line = ser.readline()
        print(line)
        outfile.write(line)


if __name__ == '__main__':
    filename = sys.argv[1]
    log_serial(filename)
// ... existing code ...
    outfile = open(filename, 'w')
    while True:
        line = ser.readline()
        print(line)
        outfile.write(line)


if __name__ == '__main__':
// ... rest of the code ...
df2aa1efbb940aa7bf5ef49235e1d7aff0d52128
test/Analysis/array-struct.c
test/Analysis/array-struct.c
// RUN: clang -checker-simple -verify %s
// RUN: clang -checker-simple -analyzer-store-region -verify %s

struct s {
  int data;
  int data_array[10];
};

typedef struct {
  int data;
} STYPE;

void f(void) {
  int a[10];
  int (*p)[10];
  p = &a;
  (*p)[3] = 1;

  struct s d;
  struct s *q;
  q = &d;
  q->data = 3;
  d.data_array[9] = 17;
}

void f2() {
  char *p = "/usr/local";
  char (*q)[4];
  q = &"abc";
}

void f3() {
  STYPE s;
}
// RUN: clang -checker-simple -verify %s
// RUN: clang -checker-simple -analyzer-store-region -verify %s

struct s {
  int data;
  int data_array[10];
};

typedef struct {
  int data;
} STYPE;

void f(void) {
  int a[10];
  int (*p)[10];
  p = &a;
  (*p)[3] = 1;

  struct s d;
  struct s *q;
  q = &d;
  q->data = 3;
  d.data_array[9] = 17;
}

void f2() {
  char *p = "/usr/local";
  char (*q)[4];
  q = &"abc";
}

void f3() {
  STYPE s;
}

void f4() {
  int a[] = { 1, 2, 3};
  int b[3] = { 1, 2 };
}
Add test code for array initialization.
Add test code for array initialization. git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@58502 91177308-0d34-0410-b5e6-96231b3b80d8
C
apache-2.0
llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang
c
## Code Before:
// RUN: clang -checker-simple -verify %s
// RUN: clang -checker-simple -analyzer-store-region -verify %s

struct s {
  int data;
  int data_array[10];
};

typedef struct {
  int data;
} STYPE;

void f(void) {
  int a[10];
  int (*p)[10];
  p = &a;
  (*p)[3] = 1;

  struct s d;
  struct s *q;
  q = &d;
  q->data = 3;
  d.data_array[9] = 17;
}

void f2() {
  char *p = "/usr/local";
  char (*q)[4];
  q = &"abc";
}

void f3() {
  STYPE s;
}
## Instruction:
Add test code for array initialization.

git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@58502 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: clang -checker-simple -verify %s
// RUN: clang -checker-simple -analyzer-store-region -verify %s

struct s {
  int data;
  int data_array[10];
};

typedef struct {
  int data;
} STYPE;

void f(void) {
  int a[10];
  int (*p)[10];
  p = &a;
  (*p)[3] = 1;

  struct s d;
  struct s *q;
  q = &d;
  q->data = 3;
  d.data_array[9] = 17;
}

void f2() {
  char *p = "/usr/local";
  char (*q)[4];
  q = &"abc";
}

void f3() {
  STYPE s;
}

void f4() {
  int a[] = { 1, 2, 3};
  int b[3] = { 1, 2 };
}
...
void f3() {
  STYPE s;
}

void f4() {
  int a[] = { 1, 2, 3};
  int b[3] = { 1, 2 };
}
...
8556437ee02de028ec5de3b867abaab82533cb91
keystone/tests/unit/common/test_manager.py
keystone/tests/unit/common/test_manager.py
import mock

from keystone import catalog
from keystone.common import manager
from keystone.tests import unit


class TestCreateLegacyDriver(unit.BaseTestCase):

    @mock.patch('oslo_log.versionutils.report_deprecated_feature')
    def test_class_is_properly_deprecated(self, mock_reporter):
        Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)

        # NOTE(dstanek): I want to subvert the requirement for this
        # class to implement all of the abstract methods.
        Driver.__abstractmethods__ = set()
        impl = Driver()

        details = {
            'as_of': 'Liberty',
            'what': 'keystone.catalog.core.Driver',
            'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
            'remove_in': 'N',
        }
        mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)

        self.assertIsInstance(impl, catalog.CatalogDriverV8)
import mock

from keystone import catalog
from keystone.common import manager
from keystone.tests import unit


class TestCreateLegacyDriver(unit.BaseTestCase):

    @mock.patch('oslo_log.versionutils.report_deprecated_feature')
    def test_class_is_properly_deprecated(self, mock_reporter):
        Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)

        # NOTE(dstanek): I want to subvert the requirement for this
        # class to implement all of the abstract methods.
        Driver.__abstractmethods__ = set()
        impl = Driver()

        details = {
            'as_of': 'Liberty',
            'what': 'keystone.catalog.core.Driver',
            'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
            'remove_in': mock.ANY,
        }
        mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)
        self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0])

        self.assertIsInstance(impl, catalog.CatalogDriverV8)
Correct test to support changing N release name
Correct test to support changing N release name oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature. Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d Closes-Bug: 1561121
Python
apache-2.0
ilay09/keystone,rajalokan/keystone,openstack/keystone,mahak/keystone,klmitch/keystone,openstack/keystone,ilay09/keystone,cernops/keystone,rajalokan/keystone,cernops/keystone,mahak/keystone,ilay09/keystone,rajalokan/keystone,klmitch/keystone,mahak/keystone,openstack/keystone
python
## Code Before:
import mock

from keystone import catalog
from keystone.common import manager
from keystone.tests import unit


class TestCreateLegacyDriver(unit.BaseTestCase):

    @mock.patch('oslo_log.versionutils.report_deprecated_feature')
    def test_class_is_properly_deprecated(self, mock_reporter):
        Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)

        # NOTE(dstanek): I want to subvert the requirement for this
        # class to implement all of the abstract methods.
        Driver.__abstractmethods__ = set()
        impl = Driver()

        details = {
            'as_of': 'Liberty',
            'what': 'keystone.catalog.core.Driver',
            'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
            'remove_in': 'N',
        }
        mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)

        self.assertIsInstance(impl, catalog.CatalogDriverV8)
## Instruction:
Correct test to support changing N release name

oslo.log is going to change to use Newton rather than N so this test should not make an assumption about the way that versionutils.deprecated is calling report_deprecated_feature.

Change-Id: I06aa6d085232376811f73597b2d84b5174bc7a8d
Closes-Bug: 1561121
## Code After:
import mock

from keystone import catalog
from keystone.common import manager
from keystone.tests import unit


class TestCreateLegacyDriver(unit.BaseTestCase):

    @mock.patch('oslo_log.versionutils.report_deprecated_feature')
    def test_class_is_properly_deprecated(self, mock_reporter):
        Driver = manager.create_legacy_driver(catalog.CatalogDriverV8)

        # NOTE(dstanek): I want to subvert the requirement for this
        # class to implement all of the abstract methods.
        Driver.__abstractmethods__ = set()
        impl = Driver()

        details = {
            'as_of': 'Liberty',
            'what': 'keystone.catalog.core.Driver',
            'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
            'remove_in': mock.ANY,
        }
        mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)
        self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0])

        self.assertIsInstance(impl, catalog.CatalogDriverV8)
// ... existing code ...
            'as_of': 'Liberty',
            'what': 'keystone.catalog.core.Driver',
            'in_favor_of': 'keystone.catalog.core.CatalogDriverV8',
            'remove_in': mock.ANY,
        }
        mock_reporter.assert_called_with(mock.ANY, mock.ANY, details)
        self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0])

        self.assertIsInstance(impl, catalog.CatalogDriverV8)
// ... rest of the code ...
40df12ab28555cc810073e9b16b8a9ce25f85618
src/main/java/com/hp/autonomy/types/requests/idol/actions/tags/TagName.java
src/main/java/com/hp/autonomy/types/requests/idol/actions/tags/TagName.java
/*
 * Copyright 2015 Hewlett-Packard Development Company, L.P.
 * Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
 */

package com.hp.autonomy.types.requests.idol.actions.tags;

import lombok.Data;

import java.io.Serializable;

@SuppressWarnings({"WeakerAccess", "unused"})
@Data
public class TagName implements Serializable {
    private static final long serialVersionUID = -6221132711228529797L;

    private final String id;
    private final String name;

    public TagName(final String name, final String id) {
        this.name = name;
        this.id = id;
    }
}
/*
 * Copyright 2015 Hewlett-Packard Development Company, L.P.
 * Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
 */

package com.hp.autonomy.types.requests.idol.actions.tags;

import com.fasterxml.jackson.annotation.JsonCreator;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;

import java.io.Serializable;

@SuppressWarnings({"WeakerAccess", "unused"})
@Getter
@NoArgsConstructor
@EqualsAndHashCode
public class TagName implements Serializable {
    private static final long serialVersionUID = -6221132711228529797L;

    private String id;
    private String name;

    @JsonCreator
    public TagName(final String path) {
        name = getFieldNameFromPath(path);
        id = adjustFieldPath(path, name);
    }

    private String adjustFieldPath(final String fieldPath, final String fieldName) {
        // Need an extra '/' to be able to query a field by its root path (since wildcards in Idol config file take the form */SOME_FIELD)
        // However, for the special autn_date field which does not have a path, adding such a '/' would break the query
        return fieldName.equals(fieldPath) || fieldPath.startsWith("/") ? fieldPath : '/' + fieldPath;
    }

    private String getFieldNameFromPath(final String value) {
        return value.contains("/") ? value.substring(value.lastIndexOf('/') + 1) : value;
    }
}
Undo change a3351c6 as the update to haven-search-components required to use it is currently on hold
Undo change a3351c6 as the update to haven-search-components required to use it is currently on hold [rev. matthew.gordon]
Java
mit
hpe-idol/java-aci-types,hpe-idol/java-aci-types,hpautonomy/java-aci-types,hpautonomy/java-aci-types
java
## Code Before:
/*
 * Copyright 2015 Hewlett-Packard Development Company, L.P.
 * Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
 */

package com.hp.autonomy.types.requests.idol.actions.tags;

import lombok.Data;

import java.io.Serializable;

@SuppressWarnings({"WeakerAccess", "unused"})
@Data
public class TagName implements Serializable {
    private static final long serialVersionUID = -6221132711228529797L;

    private final String id;
    private final String name;

    public TagName(final String name, final String id) {
        this.name = name;
        this.id = id;
    }
}
## Instruction:
Undo change a3351c6 as the update to haven-search-components required to use it is currently on hold

[rev. matthew.gordon]
## Code After:
/*
 * Copyright 2015 Hewlett-Packard Development Company, L.P.
 * Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
 */

package com.hp.autonomy.types.requests.idol.actions.tags;

import com.fasterxml.jackson.annotation.JsonCreator;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;

import java.io.Serializable;

@SuppressWarnings({"WeakerAccess", "unused"})
@Getter
@NoArgsConstructor
@EqualsAndHashCode
public class TagName implements Serializable {
    private static final long serialVersionUID = -6221132711228529797L;

    private String id;
    private String name;

    @JsonCreator
    public TagName(final String path) {
        name = getFieldNameFromPath(path);
        id = adjustFieldPath(path, name);
    }

    private String adjustFieldPath(final String fieldPath, final String fieldName) {
        // Need an extra '/' to be able to query a field by its root path (since wildcards in Idol config file take the form */SOME_FIELD)
        // However, for the special autn_date field which does not have a path, adding such a '/' would break the query
        return fieldName.equals(fieldPath) || fieldPath.startsWith("/") ? fieldPath : '/' + fieldPath;
    }

    private String getFieldNameFromPath(final String value) {
        return value.contains("/") ? value.substring(value.lastIndexOf('/') + 1) : value;
    }
}
// ... existing code ...
package com.hp.autonomy.types.requests.idol.actions.tags;

import com.fasterxml.jackson.annotation.JsonCreator;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;

import java.io.Serializable;

@SuppressWarnings({"WeakerAccess", "unused"})
@Getter
@NoArgsConstructor
@EqualsAndHashCode
public class TagName implements Serializable {
    private static final long serialVersionUID = -6221132711228529797L;

    private String id;
    private String name;

    @JsonCreator
    public TagName(final String path) {
        name = getFieldNameFromPath(path);
        id = adjustFieldPath(path, name);
    }

    private String adjustFieldPath(final String fieldPath, final String fieldName) {
        // Need an extra '/' to be able to query a field by its root path (since wildcards in Idol config file take the form */SOME_FIELD)
        // However, for the special autn_date field which does not have a path, adding such a '/' would break the query
        return fieldName.equals(fieldPath) || fieldPath.startsWith("/") ? fieldPath : '/' + fieldPath;
    }

    private String getFieldNameFromPath(final String value) {
        return value.contains("/") ? value.substring(value.lastIndexOf('/') + 1) : value;
    }
}
// ... rest of the code ...
d65991d5e2241ee3c1ce67cbaf49cbef3c53b291
src/tool/hpcrun/unwind/generic-libunwind/unw-datatypes-specific.h
src/tool/hpcrun/unwind/generic-libunwind/unw-datatypes-specific.h
//
// This software was produced with support in part from the Defense Advanced
// Research Projects Agency (DARPA) through AFRL Contract FA8650-09-C-1915.
// Nothing in this work should be construed as reflecting the official policy or
// position of the Defense Department, the United States government, or
// Rice University.
//

#ifndef UNW_DATATYPES_SPECIFIC_H
#define UNW_DATATYPES_SPECIFIC_H

#include <libunwind.h>

#include <unwind/common/fence_enum.h>
#include <hpcrun/loadmap.h>
#include <utilities/ip-normalized.h>

typedef struct {
  load_module_t* lm;
} intvl_t;

typedef struct hpcrun_unw_cursor_t {
  void* pc_unnorm;
  fence_enum_t fence; // Details on which fence stopped an unwind
  unw_cursor_t uc;

  // normalized ip for first instruction in enclosing function
  ip_normalized_t the_function;
  ip_normalized_t pc_norm;
} hpcrun_unw_cursor_t;

#endif // UNW_DATATYPES_SPECIFIC_H
//
// This software was produced with support in part from the Defense Advanced
// Research Projects Agency (DARPA) through AFRL Contract FA8650-09-C-1915.
// Nothing in this work should be construed as reflecting the official policy or
// position of the Defense Department, the United States government, or
// Rice University.
//

#ifndef UNW_DATATYPES_SPECIFIC_H
#define UNW_DATATYPES_SPECIFIC_H

#include <libunwind.h>

#include <unwind/common/fence_enum.h>
#include <hpcrun/loadmap.h>
#include <utilities/ip-normalized.h>

typedef struct {
  load_module_t* lm;
} intvl_t;

typedef struct hpcrun_unw_cursor_t {
  void* pc_unnorm;
  unw_cursor_t uc;

  // normalized ip for first instruction in enclosing function
  ip_normalized_t the_function;
  ip_normalized_t pc_norm;
} hpcrun_unw_cursor_t;

#endif // UNW_DATATYPES_SPECIFIC_H
Drop another unused libunw cursor member.
Drop another unused libunw cursor member.
C
bsd-3-clause
HPCToolkit/hpctoolkit,HPCToolkit/hpctoolkit,HPCToolkit/hpctoolkit,HPCToolkit/hpctoolkit,HPCToolkit/hpctoolkit,HPCToolkit/hpctoolkit
c
## Code Before:
//
// This software was produced with support in part from the Defense Advanced
// Research Projects Agency (DARPA) through AFRL Contract FA8650-09-C-1915.
// Nothing in this work should be construed as reflecting the official policy or
// position of the Defense Department, the United States government, or
// Rice University.
//

#ifndef UNW_DATATYPES_SPECIFIC_H
#define UNW_DATATYPES_SPECIFIC_H

#include <libunwind.h>

#include <unwind/common/fence_enum.h>
#include <hpcrun/loadmap.h>
#include <utilities/ip-normalized.h>

typedef struct {
  load_module_t* lm;
} intvl_t;

typedef struct hpcrun_unw_cursor_t {
  void* pc_unnorm;
  fence_enum_t fence; // Details on which fence stopped an unwind
  unw_cursor_t uc;

  // normalized ip for first instruction in enclosing function
  ip_normalized_t the_function;
  ip_normalized_t pc_norm;
} hpcrun_unw_cursor_t;

#endif // UNW_DATATYPES_SPECIFIC_H
## Instruction:
Drop another unused libunw cursor member.
## Code After:
//
// This software was produced with support in part from the Defense Advanced
// Research Projects Agency (DARPA) through AFRL Contract FA8650-09-C-1915.
// Nothing in this work should be construed as reflecting the official policy or
// position of the Defense Department, the United States government, or
// Rice University.
//

#ifndef UNW_DATATYPES_SPECIFIC_H
#define UNW_DATATYPES_SPECIFIC_H

#include <libunwind.h>

#include <unwind/common/fence_enum.h>
#include <hpcrun/loadmap.h>
#include <utilities/ip-normalized.h>

typedef struct {
  load_module_t* lm;
} intvl_t;

typedef struct hpcrun_unw_cursor_t {
  void* pc_unnorm;
  unw_cursor_t uc;

  // normalized ip for first instruction in enclosing function
  ip_normalized_t the_function;
  ip_normalized_t pc_norm;
} hpcrun_unw_cursor_t;

#endif // UNW_DATATYPES_SPECIFIC_H
...
typedef struct hpcrun_unw_cursor_t {
  void* pc_unnorm;
  unw_cursor_t uc;

  // normalized ip for first instruction in enclosing function
  ip_normalized_t the_function;
...
6c12f97bfed8b8a4749f75e1a508caf0ea310423
docker/update-production.py
docker/update-production.py
import argparse
import subprocess
import json
import sys

parser = argparse.ArgumentParser()
args = parser.parse_args()


def info(msg):
    sys.stdout.write('* {}\n'.format(msg))
    sys.stdout.flush()


info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
    'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
    link_to = 'muzhack-blue'
else:
    assert linked_service == 'muzhack-blue'
    link_to = 'muzhack-green'

info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)

info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service',
    '{0}:{0}'.format(link_to), '--sync', 'lb'], stdout=subprocess.PIPE)

info('Successfully switched production service to {}'.format(link_to))
import argparse
import subprocess
import json
import sys

parser = argparse.ArgumentParser()
args = parser.parse_args()


def info(msg):
    sys.stdout.write('* {}\n'.format(msg))
    sys.stdout.flush()


info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
    'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
    link_to = 'muzhack-blue'
else:
    assert linked_service == 'muzhack-blue'
    link_to = 'muzhack-green'

info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)

info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service',
    '{0}:{0}'.format(link_to), '--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)

info('Successfully switched production service to {}'.format(link_to))
Make sure to update correct load balancer
Make sure to update correct load balancer
Python
mit
muzhack/musitechhub,muzhack/musitechhub,muzhack/muzhack,muzhack/muzhack,muzhack/musitechhub,muzhack/muzhack,muzhack/musitechhub,muzhack/muzhack
python
## Code Before:
import argparse
import subprocess
import json
import sys

parser = argparse.ArgumentParser()
args = parser.parse_args()


def info(msg):
    sys.stdout.write('* {}\n'.format(msg))
    sys.stdout.flush()


info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
    'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
    link_to = 'muzhack-blue'
else:
    assert linked_service == 'muzhack-blue'
    link_to = 'muzhack-green'

info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)

info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service',
    '{0}:{0}'.format(link_to), '--sync', 'lb'], stdout=subprocess.PIPE)

info('Successfully switched production service to {}'.format(link_to))
## Instruction:
Make sure to update correct load balancer
## Code After:
import argparse
import subprocess
import json
import sys

parser = argparse.ArgumentParser()
args = parser.parse_args()


def info(msg):
    sys.stdout.write('* {}\n'.format(msg))
    sys.stdout.flush()


info('Determining current production details...')
output = subprocess.check_output(['tutum', 'service', 'inspect', 'lb.muzhack-staging']).decode(
    'utf-8')
data = json.loads(output)
linked_service = data['linked_to_service'][0]['name']
info('Currently linked service is \'{}\''.format(linked_service))
if linked_service == 'muzhack-green':
    link_to = 'muzhack-blue'
else:
    assert linked_service == 'muzhack-blue'
    link_to = 'muzhack-green'

info('Redeploying service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'redeploy', '--sync', link_to], stdout=subprocess.PIPE)

info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service',
    '{0}:{0}'.format(link_to), '--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)

info('Successfully switched production service to {}'.format(link_to))
# ... existing code ...
info('Linking to service \'{}\'...'.format(link_to))
subprocess.check_call(['tutum', 'service', 'set', '--link-service',
    '{0}:{0}'.format(link_to), '--sync', 'lb.muzhack-staging'], stdout=subprocess.PIPE)

info('Successfully switched production service to {}'.format(link_to))
# ... rest of the code ...
7677ef8bac0d367fa839d6d634054b20aebf8cb4
utils/RandomPass.java
utils/RandomPass.java
//
// RandomPass.java
//

/** Generates a random 8-character password. */
public class RandomPass {

  private static final int PASS_LEN = 8;
  private static final char MIN_CHAR = '!';
  private static final char MAX_CHAR = '~';

  public static void main(String[] args) {
    int passLen = PASS_LEN;
    if (args.length > 0) {
      passLen = Integer.parseInt(args[0]);
    }
    StringBuffer sb = new StringBuffer(passLen + 2);
    int range = MAX_CHAR - MIN_CHAR + 1;
    for (int i=0; i<passLen; i++) {
      char c = (char) (range * Math.random() + MIN_CHAR);
      sb.append(c);
    }
    System.out.println(sb);
  }

}
//
// RandomPass.java
//

/** Generates a random 8-character password. */
public class RandomPass {

  private static final int PASS_LEN = 8;
  private static final char MIN_CHAR = '!';
  private static final char MAX_CHAR = '~';

  public static void main(String[] args) {
    int passLen = PASS_LEN;
    boolean alphaNum = false;
    for (String arg : args) {
      if (arg.equals("-nosymbols")) {
        alphaNum = true;
      }
      else {
        // assume argument is password character length
        passLen = Integer.parseInt(arg);
      }
    }
    StringBuffer sb = new StringBuffer(passLen + 2);
    int range = MAX_CHAR - MIN_CHAR + 1;
    int i = 0;
    while (i < passLen) {
      char c = (char) (range * Math.random() + MIN_CHAR);
      if (alphaNum) {
        boolean alpha = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
        boolean num = c >= '0' && c <= '9';
        if (!alpha && !num) continue;
      }
      sb.append(c);
      i++;
    }
    System.out.println(sb);
  }

}
Add "-nosymbols" argument for generating alphanumeric passwords.
Add "-nosymbols" argument for generating alphanumeric passwords.
Java
bsd-2-clause
scifio/scifio
java
## Code Before:
//
// RandomPass.java
//

/** Generates a random 8-character password. */
public class RandomPass {

  private static final int PASS_LEN = 8;
  private static final char MIN_CHAR = '!';
  private static final char MAX_CHAR = '~';

  public static void main(String[] args) {
    int passLen = PASS_LEN;
    if (args.length > 0) {
      passLen = Integer.parseInt(args[0]);
    }
    StringBuffer sb = new StringBuffer(passLen + 2);
    int range = MAX_CHAR - MIN_CHAR + 1;
    for (int i=0; i<passLen; i++) {
      char c = (char) (range * Math.random() + MIN_CHAR);
      sb.append(c);
    }
    System.out.println(sb);
  }

}
## Instruction:
Add "-nosymbols" argument for generating alphanumeric passwords.
## Code After:
//
// RandomPass.java
//

/** Generates a random 8-character password. */
public class RandomPass {

  private static final int PASS_LEN = 8;
  private static final char MIN_CHAR = '!';
  private static final char MAX_CHAR = '~';

  public static void main(String[] args) {
    int passLen = PASS_LEN;
    boolean alphaNum = false;
    for (String arg : args) {
      if (arg.equals("-nosymbols")) {
        alphaNum = true;
      }
      else {
        // assume argument is password character length
        passLen = Integer.parseInt(arg);
      }
    }
    StringBuffer sb = new StringBuffer(passLen + 2);
    int range = MAX_CHAR - MIN_CHAR + 1;
    int i = 0;
    while (i < passLen) {
      char c = (char) (range * Math.random() + MIN_CHAR);
      if (alphaNum) {
        boolean alpha = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
        boolean num = c >= '0' && c <= '9';
        if (!alpha && !num) continue;
      }
      sb.append(c);
      i++;
    }
    System.out.println(sb);
  }

}
# ... existing code ...
  public static void main(String[] args) {
    int passLen = PASS_LEN;
    boolean alphaNum = false;
    for (String arg : args) {
      if (arg.equals("-nosymbols")) {
        alphaNum = true;
      }
      else {
        // assume argument is password character length
        passLen = Integer.parseInt(arg);
      }
    }
    StringBuffer sb = new StringBuffer(passLen + 2);
    int range = MAX_CHAR - MIN_CHAR + 1;
    int i = 0;
    while (i < passLen) {
      char c = (char) (range * Math.random() + MIN_CHAR);
      if (alphaNum) {
        boolean alpha = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
        boolean num = c >= '0' && c <= '9';
        if (!alpha && !num) continue;
      }
      sb.append(c);
      i++;
    }
    System.out.println(sb);
  }
# ... rest of the code ...
350380095b84bce5bd06e1ac046d9036fd7ab0cd
bluebottle/partners/serializers.py
bluebottle/partners/serializers.py
from bluebottle.bluebottle_drf2.serializers import ImageSerializer
from bluebottle.projects.models import PartnerOrganization
from bluebottle.projects.serializers import ProjectSerializer, ProjectPreviewSerializer
from rest_framework import serializers


class PartnerOrganizationSerializer(serializers.ModelSerializer):
    id = serializers.CharField(source='slug', read_only=True)
    projects = ProjectPreviewSerializer(source='projects')
    description = serializers.CharField(source='description')
    image = ImageSerializer(required=False)

    class Meta:
        model = PartnerOrganization
        fields = ('id', 'name', 'projects', 'description', 'image')
from bluebottle.bluebottle_drf2.serializers import ImageSerializer
from bluebottle.projects.models import PartnerOrganization
from bluebottle.bb_projects.serializers import ProjectPreviewSerializer
from rest_framework import serializers


class PartnerOrganizationSerializer(serializers.ModelSerializer):
    id = serializers.CharField(source='slug', read_only=True)
    projects = ProjectPreviewSerializer(source='projects')
    description = serializers.CharField(source='description')
    image = ImageSerializer(required=False)

    class Meta:
        model = PartnerOrganization
        fields = ('id', 'name', 'projects', 'description', 'image')
Use a simpler serializer that does not require people_requested/people_registered annotations / fields
Use a simpler serializer that does not require people_requested/people_registered annotations / fields
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle
python
## Code Before:
from bluebottle.bluebottle_drf2.serializers import ImageSerializer
from bluebottle.projects.models import PartnerOrganization
from bluebottle.projects.serializers import ProjectSerializer, ProjectPreviewSerializer
from rest_framework import serializers


class PartnerOrganizationSerializer(serializers.ModelSerializer):
    id = serializers.CharField(source='slug', read_only=True)
    projects = ProjectPreviewSerializer(source='projects')
    description = serializers.CharField(source='description')
    image = ImageSerializer(required=False)

    class Meta:
        model = PartnerOrganization
        fields = ('id', 'name', 'projects', 'description', 'image')
## Instruction:
Use a simpler serializer that does not require people_requested/people_registered annotations / fields
## Code After:
from bluebottle.bluebottle_drf2.serializers import ImageSerializer
from bluebottle.projects.models import PartnerOrganization
from bluebottle.bb_projects.serializers import ProjectPreviewSerializer
from rest_framework import serializers


class PartnerOrganizationSerializer(serializers.ModelSerializer):
    id = serializers.CharField(source='slug', read_only=True)
    projects = ProjectPreviewSerializer(source='projects')
    description = serializers.CharField(source='description')
    image = ImageSerializer(required=False)

    class Meta:
        model = PartnerOrganization
        fields = ('id', 'name', 'projects', 'description', 'image')
...
from bluebottle.bluebottle_drf2.serializers import ImageSerializer
from bluebottle.projects.models import PartnerOrganization
from bluebottle.bb_projects.serializers import ProjectPreviewSerializer
from rest_framework import serializers
...
410cf7dd34491d81ec7311cc5116bce9351ef902
okhttp/src/main/java/com/squareup/okhttp/internal/http/HttpMethod.java
okhttp/src/main/java/com/squareup/okhttp/internal/http/HttpMethod.java
/*
 * Copyright (C) 2014 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.okhttp.internal.http;

public final class HttpMethod {
  public static boolean invalidatesCache(String method) {
    return method.equals("POST")
        || method.equals("PATCH")
        || method.equals("PUT")
        || method.equals("DELETE");
  }

  public static boolean requiresRequestBody(String method) {
    return method.equals("POST")
        || method.equals("PUT")
        || method.equals("PATCH");
  }

  public static boolean permitsRequestBody(String method) {
    return requiresRequestBody(method)
        || method.equals("DELETE"); // Permitted as spec is ambiguous.
  }

  private HttpMethod() {
  }
}
/*
 * Copyright (C) 2014 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.okhttp.internal.http;

public final class HttpMethod {
  public static boolean invalidatesCache(String method) {
    return method.equals("POST")
        || method.equals("PATCH")
        || method.equals("PUT")
        || method.equals("DELETE")
        || method.equals("MOVE"); // WebDAV
  }

  public static boolean requiresRequestBody(String method) {
    return method.equals("POST")
        || method.equals("PUT")
        || method.equals("PATCH")
        || method.equals("PROPPATCH") // WebDAV
        || method.equals("REPORT"); // CalDAV/CardDAV (defined in WebDAV Versioning)
  }

  public static boolean permitsRequestBody(String method) {
    return requiresRequestBody(method)
        || method.equals("DELETE") // Permitted as spec is ambiguous.
        || method.equals("PROPFIND") // (WebDAV) without body: request <allprop/>
        || method.equals("MKCOL") // (WebDAV) may contain a body, but behaviour is unspecified
        || method.equals("LOCK"); // (WebDAV) body: create lock, without body: refresh lock
  }

  private HttpMethod() {
  }
}
Add support for WebDAV HTTP methods (RFC 4918)
Add support for WebDAV HTTP methods (RFC 4918)
Java
apache-2.0
rschilling/okhttp,SunnyDayDev/okhttp,xph906/NewOKHttp,Synix/okhttp,cmzy/okhttp,ze-pequeno/okhttp,Voxer/okhttp,artem-zinnatullin/okhttp,cnoldtree/okhttp,teffy/okhttp,apoorvmahajan/okhttp,nfuller/okhttp,nachocove/okhttp,SunnyDayDev/okhttp,wfxiang08/okhttp,cketti/okhttp,yschimke/okhttp,ansman/okhttp,hgl888/okhttp,zmarkan/okhttp,ze-pequeno/okhttp,nfuller/okhttp,xph906/NewOKHttp,ankit3005/okhttp,tiarebalbi/okhttp,ankit3005/okhttp,VioletLife/okhttp,joansmith/okhttp,cnoldtree/okhttp,yschimke/okhttp,germanattanasio/okhttp,teffy/okhttp,artem-zinnatullin/okhttp,Synix/okhttp,square/okhttp,germanattanasio/okhttp,apoorvmahajan/okhttp,artem-zinnatullin/okhttp,mjbenedict/okhttp,jrodbx/okhttp,joansmith/okhttp,hgl888/okhttp,SunnyDayDev/okhttp,cketti/okhttp,VioletLife/okhttp,wfxiang08/okhttp,nachocove/okhttp,yschimke/okhttp,Synix/okhttp,chaitanyajun12/okhttp,square/okhttp,chaitanyajun12/okhttp,teffy/okhttp,zmarkan/okhttp,ansman/okhttp,cmzy/okhttp,NightlyNexus/okhttp,Voxer/okhttp,cketti/okhttp,xph906/NetProphet,jrodbx/okhttp,yakatak/okhttp,germanattanasio/okhttp,tiarebalbi/okhttp,ankit3005/okhttp,apoorvmahajan/okhttp,xph906/NewOKHttp,qingsong-xu/okhttp,chaitanyajun12/okhttp,xph906/NetProphet,ze-pequeno/okhttp,jrodbx/okhttp,hgl888/okhttp,VioletLife/okhttp,wfxiang08/okhttp,Voxer/okhttp,ansman/okhttp,mjbenedict/okhttp,cnoldtree/okhttp,zmarkan/okhttp,yakatak/okhttp,yakatak/okhttp,xph906/NetProphet,NightlyNexus/okhttp,tiarebalbi/okhttp,qingsong-xu/okhttp,rschilling/okhttp,square/okhttp,nachocove/okhttp,mjbenedict/okhttp,rschilling/okhttp,joansmith/okhttp,NightlyNexus/okhttp,qingsong-xu/okhttp,nfuller/okhttp,apoorvmahajan/okhttp
java
## Code Before: /* * Copyright (C) 2014 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.okhttp.internal.http; public final class HttpMethod { public static boolean invalidatesCache(String method) { return method.equals("POST") || method.equals("PATCH") || method.equals("PUT") || method.equals("DELETE"); } public static boolean requiresRequestBody(String method) { return method.equals("POST") || method.equals("PUT") || method.equals("PATCH"); } public static boolean permitsRequestBody(String method) { return requiresRequestBody(method) || method.equals("DELETE"); // Permitted as spec is ambiguous. } private HttpMethod() { } } ## Instruction: Add support for WebDAV HTTP methods (RFC 4918) ## Code After: /* * Copyright (C) 2014 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.okhttp.internal.http; public final class HttpMethod { public static boolean invalidatesCache(String method) { return method.equals("POST") || method.equals("PATCH") || method.equals("PUT") || method.equals("DELETE") || method.equals("MOVE"); // WebDAV } public static boolean requiresRequestBody(String method) { return method.equals("POST") || method.equals("PUT") || method.equals("PATCH") || method.equals("PROPPATCH") // WebDAV || method.equals("REPORT"); // CalDAV/CardDAV (defined in WebDAV Versioning) } public static boolean permitsRequestBody(String method) { return requiresRequestBody(method) || method.equals("DELETE") // Permitted as spec is ambiguous. || method.equals("PROPFIND") // (WebDAV) without body: request <allprop/> || method.equals("MKCOL") // (WebDAV) may contain a body, but behaviour is unspecified || method.equals("LOCK"); // (WebDAV) body: create lock, without body: refresh lock } private HttpMethod() { } }
// ... existing code ... return method.equals("POST") || method.equals("PATCH") || method.equals("PUT") || method.equals("DELETE") || method.equals("MOVE"); // WebDAV } public static boolean requiresRequestBody(String method) { return method.equals("POST") || method.equals("PUT") || method.equals("PATCH") || method.equals("PROPPATCH") // WebDAV || method.equals("REPORT"); // CalDAV/CardDAV (defined in WebDAV Versioning) } public static boolean permitsRequestBody(String method) { return requiresRequestBody(method) || method.equals("DELETE") // Permitted as spec is ambiguous. || method.equals("PROPFIND") // (WebDAV) without body: request <allprop/> || method.equals("MKCOL") // (WebDAV) may contain a body, but behaviour is unspecified || method.equals("LOCK"); // (WebDAV) body: create lock, without body: refresh lock } private HttpMethod() { // ... rest of the code ...
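The classification this record adds is easy to restate outside Java. Below is a minimal Python sketch of the same rules, WebDAV methods included; the set names are illustrative and not part of OkHttp.

```python
# Minimal Python sketch of the same HTTP method classification (illustrative only).
CACHE_INVALIDATING = {"POST", "PATCH", "PUT", "DELETE", "MOVE"}        # MOVE: WebDAV
BODY_REQUIRED = {"POST", "PUT", "PATCH", "PROPPATCH", "REPORT"}        # WebDAV / CalDAV
BODY_PERMITTED = BODY_REQUIRED | {"DELETE", "PROPFIND", "MKCOL", "LOCK"}

def invalidates_cache(method: str) -> bool:
    return method in CACHE_INVALIDATING

def requires_request_body(method: str) -> bool:
    return method in BODY_REQUIRED

def permits_request_body(method: str) -> bool:
    return method in BODY_PERMITTED

# PROPFIND may carry a body but does not need one.
assert permits_request_body("PROPFIND") and not requires_request_body("PROPFIND")
```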
8365945ef62b8f9cd37022302e8ee6299716720d
masterfirefoxos/base/helpers.py
masterfirefoxos/base/helpers.py
from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup static = register.function(static_helper) @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
Add static helper for jinja2
Add static helper for jinja2
Python
mpl-2.0
craigcook/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos,enng0227/masterfirefoxos,glogiotatidis/masterfirefoxos,mozilla/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,enng0227/masterfirefoxos,mozilla/masterfirefoxos,glogiotatidis/masterfirefoxos,glogiotatidis/masterfirefoxos,liu21st/masterfirefoxos,liu21st/masterfirefoxos,enng0227/masterfirefoxos,craigcook/masterfirefoxos,liu21st/masterfirefoxos,mozilla/masterfirefoxos,craigcook/masterfirefoxos
python
## Code Before: from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request)) ## Instruction: Add static helper for jinja2 ## Code After: from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup static = register.function(static_helper) @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request))
// ... existing code ... from django.contrib.staticfiles.templatetags.staticfiles import static as static_helper from feincms.templatetags.feincms_tags import feincms_render_region from jingo import register from jinja2 import Markup static = register.function(static_helper) @register.function def render_region(feincms_page, region, request): return Markup(feincms_render_region(None, feincms_page, region, request)) // ... rest of the code ...
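For context on how the registered helper is consumed: once exposed through jingo it can be called from Jinja2 templates just like the Django template tag. A rough standalone sketch of what it returns, using the helper's modern import path (the record targets an older Django where it lived under django.contrib.staticfiles.templatetags.staticfiles):

```python
# Rough standalone sketch of what the registered static helper returns (Django required).
import django
from django.conf import settings

settings.configure(STATIC_URL="/static/")
django.setup()

from django.templatetags.static import static  # modern home of the same helper

print(static("css/base.css"))  # -> /static/css/base.css
# In a Jinja2 template rendered through jingo this becomes:
#   <link rel="stylesheet" href="{{ static('css/base.css') }}">
```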
f959e9213f27cee5ed5739655d4f85c7d0d442aa
tests/functional/customer/test_notification.py
tests/functional/customer/test_notification.py
from http import client as http_client from oscar.test.testcases import WebTestCase from oscar.apps.customer.notifications import services from oscar.test.factories import UserFactory from django.urls import reverse from oscar.apps.customer.models import Notification class TestAUserWithUnreadNotifications(WebTestCase): def setUp(self): self.user = UserFactory() services.notify_user(self.user, "Test message") def test_can_see_them_in_page_header(self): homepage = self.app.get('/', user=self.user) self.assertEqual(1, homepage.context['num_unread_notifications']) def test_notification_list_view_shows_user_notifications(self): response = self.app.get(reverse('customer:notifications-inbox'), user=self.user) self.assertEqual(1, len(response.context['notifications'])) self.assertEqual(False, response.context['notifications'][0].is_read) def test_notification_marked_as_read(self): n = Notification.objects.first() path = reverse('customer:notifications-detail', kwargs={'pk': n.id}) response = self.app.get(path, user=self.user) # notification should be marked as read self.assertEqual(http_client.OK, response.status_code) n.refresh_from_db() self.assertTrue(n.is_read)
from http import client as http_client from oscar.test.testcases import WebTestCase from oscar.apps.customer.notifications import services from oscar.test.factories import UserFactory from django.urls import reverse from oscar.apps.customer.models import Notification class TestAUserWithUnreadNotifications(WebTestCase): def setUp(self): self.user = UserFactory() services.notify_user(self.user, "Test message") def test_can_see_them_in_page_header(self): homepage = self.app.get('/', user=self.user) if homepage.status_code == 302: homepage = homepage.follow() self.assertEqual(1, homepage.context['num_unread_notifications']) def test_notification_list_view_shows_user_notifications(self): response = self.app.get(reverse('customer:notifications-inbox'), user=self.user) self.assertEqual(1, len(response.context['notifications'])) self.assertEqual(False, response.context['notifications'][0].is_read) def test_notification_marked_as_read(self): n = Notification.objects.first() path = reverse('customer:notifications-detail', kwargs={'pk': n.id}) response = self.app.get(path, user=self.user) # notification should be marked as read self.assertEqual(http_client.OK, response.status_code) n.refresh_from_db() self.assertTrue(n.is_read)
Update test in case the home page has a redirection.
Update test in case the home page has a redirection.
Python
bsd-3-clause
solarissmoke/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sasha0/django-oscar,django-oscar/django-oscar
python
## Code Before: from http import client as http_client from oscar.test.testcases import WebTestCase from oscar.apps.customer.notifications import services from oscar.test.factories import UserFactory from django.urls import reverse from oscar.apps.customer.models import Notification class TestAUserWithUnreadNotifications(WebTestCase): def setUp(self): self.user = UserFactory() services.notify_user(self.user, "Test message") def test_can_see_them_in_page_header(self): homepage = self.app.get('/', user=self.user) self.assertEqual(1, homepage.context['num_unread_notifications']) def test_notification_list_view_shows_user_notifications(self): response = self.app.get(reverse('customer:notifications-inbox'), user=self.user) self.assertEqual(1, len(response.context['notifications'])) self.assertEqual(False, response.context['notifications'][0].is_read) def test_notification_marked_as_read(self): n = Notification.objects.first() path = reverse('customer:notifications-detail', kwargs={'pk': n.id}) response = self.app.get(path, user=self.user) # notification should be marked as read self.assertEqual(http_client.OK, response.status_code) n.refresh_from_db() self.assertTrue(n.is_read) ## Instruction: Update test in case if home page has redirection. ## Code After: from http import client as http_client from oscar.test.testcases import WebTestCase from oscar.apps.customer.notifications import services from oscar.test.factories import UserFactory from django.urls import reverse from oscar.apps.customer.models import Notification class TestAUserWithUnreadNotifications(WebTestCase): def setUp(self): self.user = UserFactory() services.notify_user(self.user, "Test message") def test_can_see_them_in_page_header(self): homepage = self.app.get('/', user=self.user) if homepage.status_code == 302: homepage = homepage.follow() self.assertEqual(1, homepage.context['num_unread_notifications']) def test_notification_list_view_shows_user_notifications(self): response = self.app.get(reverse('customer:notifications-inbox'), user=self.user) self.assertEqual(1, len(response.context['notifications'])) self.assertEqual(False, response.context['notifications'][0].is_read) def test_notification_marked_as_read(self): n = Notification.objects.first() path = reverse('customer:notifications-detail', kwargs={'pk': n.id}) response = self.app.get(path, user=self.user) # notification should be marked as read self.assertEqual(http_client.OK, response.status_code) n.refresh_from_db() self.assertTrue(n.is_read)
# ... existing code ... def test_can_see_them_in_page_header(self): homepage = self.app.get('/', user=self.user) if homepage.status_code == 302: homepage = homepage.follow() self.assertEqual(1, homepage.context['num_unread_notifications']) def test_notification_list_view_shows_user_notifications(self): # ... rest of the code ...
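The fix simply follows an initial redirect before asserting on the template context. A small hypothetical helper expressing the same WebTest pattern more generally (response.follow() is the standard WebTest call):

```python
# Hypothetical helper: chase any chain of redirects before returning the final page.
def get_final_page(app, url, **kwargs):
    response = app.get(url, **kwargs)
    while 300 <= response.status_code < 400:
        response = response.follow()
    return response

# Usage in the test would then read:
#   homepage = get_final_page(self.app, '/', user=self.user)
#   self.assertEqual(1, homepage.context['num_unread_notifications'])
```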
dc56d2634f05ec57ba594c5c70193d2b113c53e5
Mathematics/Fundamentals/jim-and-the-jokes.py
Mathematics/Fundamentals/jim-and-the-jokes.py
def base(m,d): # Converts d (base m) to its decimal equivalent result = 0 c = 1 while(d > 0): x = d%10 if(x >= m): return -1 d /= 10 result += x*c c *= m return result n = int(raw_input()) decimals = [0]*37 # 37 = 12*3 + 1 = max(base(i,j)) for 1<=i<=12 (months) and 1<=j<=31 (days) for i in range(n): m,d = [int(j) for j in raw_input().split()] x = base(m,d) result = 0 if(x != -1): result += decimals[x] decimals[x] += 1 print result
def base(m,d): # Converts d (base m) to its decimal equivalent result = 0 c = 1 while(d > 0): x = d%10 if(x >= m): return -1 d /= 10 result += x*c c *= m return result n = int(raw_input()) decimals = [0]*38 # 37 = 12*3 + 1 = max(base(i,j)) for 1<=i<=12 (months) and 1<=j<=31 (days) for i in range(n): m,d = [int(j) for j in raw_input().split()] x = base(m,d) #print 'x = ' + str(x) if(x != -1): decimals[x] += 1 answer = 0 for j in decimals: # Note that the following if-else is necessary in order # to deal with large integers if j%2 == 0: answer += (j/2)*(j-1) else: answer += ((j-1)/2)*j print answer
Fix code to deal with large numbers
Fix code to deal with large numbers
Python
mit
ugaliguy/HackerRank,ugaliguy/HackerRank,ugaliguy/HackerRank
python
## Code Before: def base(m,d): # Converts d (base m) to its decimal equivalent result = 0 c = 1 while(d > 0): x = d%10 if(x >= m): return -1 d /= 10 result += x*c c *= m return result n = int(raw_input()) decimals = [0]*37 # 37 = 12*3 + 1 = max(base(i,j)) for 1<=i<=12 (months) and 1<=j<=31 (days) for i in range(n): m,d = [int(j) for j in raw_input().split()] x = base(m,d) result = 0 if(x != -1): result += decimals[x] decimals[x] += 1 print result ## Instruction: Fix code to deal with large numbers ## Code After: def base(m,d): # Converts d (base m) to its decimal equivalent result = 0 c = 1 while(d > 0): x = d%10 if(x >= m): return -1 d /= 10 result += x*c c *= m return result n = int(raw_input()) decimals = [0]*38 # 37 = 12*3 + 1 = max(base(i,j)) for 1<=i<=12 (months) and 1<=j<=31 (days) for i in range(n): m,d = [int(j) for j in raw_input().split()] x = base(m,d) #print 'x = ' + str(x) if(x != -1): decimals[x] += 1 answer = 0 for j in decimals: # Note that the following if-else is necessary in order # to deal with large integers if j%2 == 0: answer += (j/2)*(j-1) else: answer += ((j-1)/2)*j print answer
# ... existing code ... return result n = int(raw_input()) decimals = [0]*38 # 37 = 12*3 + 1 = max(base(i,j)) for 1<=i<=12 (months) and 1<=j<=31 (days) for i in range(n): m,d = [int(j) for j in raw_input().split()] x = base(m,d) #print 'x = ' + str(x) if(x != -1): decimals[x] += 1 answer = 0 for j in decimals: # Note that the following if-else is necessary in order # to deal with large integers if j%2 == 0: answer += (j/2)*(j-1) else: answer += ((j-1)/2)*j print answer # ... rest of the code ...
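The rewritten final loop counts, for every decimal value, the number of unordered pairs of dates that share it, that is j*(j-1)/2 per bucket; the even/odd branches only keep the intermediate products small. A compact Python 3 sketch of the same count (the original script is Python 2):

```python
# Python 3 sketch: a joke is an unordered pair of dates mapping to the same decimal value.
from collections import Counter
from math import comb

def count_jokes(pairs):
    """pairs is an iterable of (month, day) tuples; day is read as a base-`month` numeral."""
    def to_decimal(m, d):
        result, place = 0, 1
        while d > 0:
            d, digit = divmod(d, 10)
            if digit >= m:
                return None          # not a valid base-m numeral
            result += digit * place
            place *= m
        return result

    buckets = Counter(v for v in (to_decimal(m, d) for m, d in pairs) if v is not None)
    return sum(comb(j, 2) for j in buckets.values())

# (2, 10) and (10, 2) both map to decimal 2, giving exactly one matching pair.
assert count_jokes([(10, 10), (2, 10), (10, 2)]) == 1
```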
3cef3e4774923b81e622f03aee44a933293c6a8d
modelview/migrations/0036_auto_20170322_1622.py
modelview/migrations/0036_auto_20170322_1622.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('modelview', '0035_auto_20160426_1721'), ] operations = [ migrations.AlterField( model_name='basicfactsheet', name='source_of_funding', field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ #('modelview', '0035_auto_20160426_1721'), ('modelview', '0035_auto_20170724_1801'), ] operations = [ migrations.AlterField( model_name='basicfactsheet', name='source_of_funding', field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'), ), ]
Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724
Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724
Python
agpl-3.0
openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform
python
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('modelview', '0035_auto_20160426_1721'), ] operations = [ migrations.AlterField( model_name='basicfactsheet', name='source_of_funding', field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'), ), ] ## Instruction: Change reference from modelview/0035_auto_20160426 to 0035_auto_20170724 ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ #('modelview', '0035_auto_20160426_1721'), ('modelview', '0035_auto_20170724_1801'), ] operations = [ migrations.AlterField( model_name='basicfactsheet', name='source_of_funding', field=models.CharField(help_text='What is the main source of funding?', max_length=200, null=True, verbose_name='Source of funding'), ), ]
... class Migration(migrations.Migration): dependencies = [ #('modelview', '0035_auto_20160426_1721'), ('modelview', '0035_auto_20170724_1801'), ] operations = [ ...
be8b6e9b3cd81a22d85046c769e0d267b41004e3
MoMMI/types.py
MoMMI/types.py
class SnowflakeID(int): """ Represents a Discord Snowflake ID. """ pass
from typing import Union class SnowflakeID(int): """ Represents a Discord Snowflake ID. """ pass MIdentifier = Union[SnowflakeID, str]
Add string and snowflake identifier union.
Add string and snowflake identifier union.
Python
mit
PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI
python
## Code Before: class SnowflakeID(int): """ Represents a Discord Snowflake ID. """ pass ## Instruction: Add string and snowflake identifier union. ## Code After: from typing import Union class SnowflakeID(int): """ Represents a Discord Snowflake ID. """ pass MIdentifier = Union[SnowflakeID, str]
... from typing import Union class SnowflakeID(int): """ Represents a Discord Snowflake ID. """ pass MIdentifier = Union[SnowflakeID, str] ...
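A Union alias like MIdentifier is normally consumed by functions that accept either form. A small illustrative sketch; the describe() function is hypothetical and not part of MoMMI:

```python
from typing import Union

class SnowflakeID(int):
    """Represents a Discord Snowflake ID."""

MIdentifier = Union[SnowflakeID, str]

def describe(identifier: MIdentifier) -> str:
    # Hypothetical consumer: accept either a numeric snowflake or a plain name.
    if isinstance(identifier, SnowflakeID):
        return "snowflake:{0}".format(int(identifier))
    return "name:{0}".format(identifier)

assert describe(SnowflakeID(80351110224678912)) == "snowflake:80351110224678912"
assert describe("general") == "name:general"
```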
fff41b90b96f0a12a69afcf342812bdf24538e64
setup.py
setup.py
from setuptools import setup setup( name='fedimg', version='0.0.1', description='Service to automatically upload built Fedora images \ to internal and external cloud providers.', classifiers=[ "Programming Language :: Python :: 2", "License :: OSI Approved :: GNU Affero General Public License \ v3 or later (AGPLv3+)", ], keywords='python Fedora cloud image uploader', author='David Gay', author_email='[email protected]', url='https://github.com/oddshocks/fedimg', license='AGPLv3+', include_package_data=True, zip_safe=False, install_requires=["fedmsg", "apache-libcloud"], packages=[], entry_points=""" [moksha.consumer] kojiconsumer = fedimg.consumers:KojiConsumer """, )
from setuptools import setup setup( name='fedimg', version='0.0.1', description='Service to automatically upload built Fedora images \ to internal and external cloud providers.', classifiers=[ "Programming Language :: Python :: 2", "License :: OSI Approved :: GNU Affero General Public License \ v3 or later (AGPLv3+)", ], keywords='python Fedora cloud image uploader', author='David Gay', author_email='[email protected]', url='https://github.com/oddshocks/fedimg', license='AGPLv3+', include_package_data=True, zip_safe=False, install_requires=["fedmsg", "apache-libcloud", "paramiko"], packages=[], entry_points=""" [moksha.consumer] kojiconsumer = fedimg.consumers:KojiConsumer """, )
Add paramiko to install_requires since libcloud deploy_node() requires it.
Add paramiko to install_requires since libcloud deploy_node() requires it.
Python
agpl-3.0
fedora-infra/fedimg,fedora-infra/fedimg
python
## Code Before: from setuptools import setup setup( name='fedimg', version='0.0.1', description='Service to automatically upload built Fedora images \ to internal and external cloud providers.', classifiers=[ "Programming Language :: Python :: 2", "License :: OSI Approved :: GNU Affero General Public License \ v3 or later (AGPLv3+)", ], keywords='python Fedora cloud image uploader', author='David Gay', author_email='[email protected]', url='https://github.com/oddshocks/fedimg', license='AGPLv3+', include_package_data=True, zip_safe=False, install_requires=["fedmsg", "apache-libcloud"], packages=[], entry_points=""" [moksha.consumer] kojiconsumer = fedimg.consumers:KojiConsumer """, ) ## Instruction: Add paramiko to install_requires since libcloud deploy_node() requires it. ## Code After: from setuptools import setup setup( name='fedimg', version='0.0.1', description='Service to automatically upload built Fedora images \ to internal and external cloud providers.', classifiers=[ "Programming Language :: Python :: 2", "License :: OSI Approved :: GNU Affero General Public License \ v3 or later (AGPLv3+)", ], keywords='python Fedora cloud image uploader', author='David Gay', author_email='[email protected]', url='https://github.com/oddshocks/fedimg', license='AGPLv3+', include_package_data=True, zip_safe=False, install_requires=["fedmsg", "apache-libcloud", "paramiko"], packages=[], entry_points=""" [moksha.consumer] kojiconsumer = fedimg.consumers:KojiConsumer """, )
# ... existing code ... include_package_data=True, zip_safe=False, install_requires=["fedmsg", "apache-libcloud", "paramiko"], packages=[], entry_points=""" [moksha.consumer] # ... rest of the code ...
d20039737d1e25f4462c4865347fa22411045677
budgetsupervisor/users/models.py
budgetsupervisor/users/models.py
from django.db import models from django.contrib.auth.models import AbstractUser from django.conf import settings from django.db.models.signals import post_save from saltedge.factory import get_saltedge_app class User(AbstractUser): pass class ProfileManager(models.Manager): def create_in_saltedge(self, profile): app = get_saltedge_app() url = "https://www.saltedge.com/api/v5/customers" payload = json.dumps({"data": {"identifier": profile.user.id}}) response = app.post(url, payload) data = response.json() profile.external_id = data["data"]["id"] profile.save() class Profile(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) external_id = models.BigIntegerField(blank=True, null=True) objects = ProfileManager() def __str__(self): return str(self.user) def create_user_profile(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
from django.db import models from django.contrib.auth.models import AbstractUser from django.conf import settings from django.db.models.signals import post_save from saltedge.factory import get_saltedge_app class User(AbstractUser): pass class ProfileManager(models.Manager): def create_in_saltedge(self, profile): app = get_saltedge_app() url = "https://www.saltedge.com/api/v5/customers" payload = json.dumps({"data": {"identifier": profile.user.id}}) response = app.post(url, payload) data = response.json() profile.external_id = data["data"]["id"] profile.save() def remove_from_saltedge(self, profile): pass class Profile(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) external_id = models.BigIntegerField(blank=True, null=True) objects = ProfileManager() def __str__(self): return str(self.user) def create_user_profile(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
Add placeholder for removing customer from saltedge
Add placeholder for removing customer from saltedge
Python
mit
ltowarek/budget-supervisor
python
## Code Before: from django.db import models from django.contrib.auth.models import AbstractUser from django.conf import settings from django.db.models.signals import post_save from saltedge.factory import get_saltedge_app class User(AbstractUser): pass class ProfileManager(models.Manager): def create_in_saltedge(self, profile): app = get_saltedge_app() url = "https://www.saltedge.com/api/v5/customers" payload = json.dumps({"data": {"identifier": profile.user.id}}) response = app.post(url, payload) data = response.json() profile.external_id = data["data"]["id"] profile.save() class Profile(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) external_id = models.BigIntegerField(blank=True, null=True) objects = ProfileManager() def __str__(self): return str(self.user) def create_user_profile(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL) ## Instruction: Add placeholder for removing customer from saltedge ## Code After: from django.db import models from django.contrib.auth.models import AbstractUser from django.conf import settings from django.db.models.signals import post_save from saltedge.factory import get_saltedge_app class User(AbstractUser): pass class ProfileManager(models.Manager): def create_in_saltedge(self, profile): app = get_saltedge_app() url = "https://www.saltedge.com/api/v5/customers" payload = json.dumps({"data": {"identifier": profile.user.id}}) response = app.post(url, payload) data = response.json() profile.external_id = data["data"]["id"] profile.save() def remove_from_saltedge(self, profile): pass class Profile(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) external_id = models.BigIntegerField(blank=True, null=True) objects = ProfileManager() def __str__(self): return str(self.user) def create_user_profile(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) post_save.connect(create_user_profile, sender=settings.AUTH_USER_MODEL)
// ... existing code ... profile.external_id = data["data"]["id"] profile.save() def remove_from_saltedge(self, profile): pass class Profile(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) // ... rest of the code ...
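The new manager method is deliberately a stub. Purely as a hypothetical sketch of the shape it might take later, mirroring create_in_saltedge; the delete endpoint and the client's delete() method are assumptions, not verified against the Salt Edge API:

```python
# Hypothetical completion of the stub; endpoint and client method are assumptions.
from saltedge.factory import get_saltedge_app

def remove_from_saltedge(self, profile):
    app = get_saltedge_app()
    url = "https://www.saltedge.com/api/v5/customers/{0}".format(profile.external_id)
    app.delete(url)  # assumes the wrapper exposes delete(); only post() appears in the record
    profile.external_id = None
    profile.save()
```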
b3fb2ba913a836a1e198795019870e318879d5f7
dictionary/forms.py
dictionary/forms.py
from django import forms from django.forms.models import BaseModelFormSet from django.utils.translation import ugettext_lazy as _ class BaseWordFormSet(BaseModelFormSet): def add_fields(self, form, index): super(BaseWordFormSet, self).add_fields(form, index) form.fields["isLocal"] = forms.BooleanField(label=_("Local"))
from django import forms from django.forms.models import BaseModelFormSet from django.utils.translation import ugettext_lazy as _ class BaseWordFormSet(BaseModelFormSet): def add_fields(self, form, index): super(BaseWordFormSet, self).add_fields(form, index) form.fields["isLocal"] = forms.BooleanField(label=_("Local"), required=False)
Make sure the isLocal BooleanField is not required
Make sure the isLocal BooleanField is not required
Python
agpl-3.0
sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer
python
## Code Before: from django import forms from django.forms.models import BaseModelFormSet from django.utils.translation import ugettext_lazy as _ class BaseWordFormSet(BaseModelFormSet): def add_fields(self, form, index): super(BaseWordFormSet, self).add_fields(form, index) form.fields["isLocal"] = forms.BooleanField(label=_("Local")) ## Instruction: Make sure the isLocal BooleanField is not required ## Code After: from django import forms from django.forms.models import BaseModelFormSet from django.utils.translation import ugettext_lazy as _ class BaseWordFormSet(BaseModelFormSet): def add_fields(self, form, index): super(BaseWordFormSet, self).add_fields(form, index) form.fields["isLocal"] = forms.BooleanField(label=_("Local"), required=False)
# ... existing code ... class BaseWordFormSet(BaseModelFormSet): def add_fields(self, form, index): super(BaseWordFormSet, self).add_fields(form, index) form.fields["isLocal"] = forms.BooleanField(label=_("Local"), required=False) # ... rest of the code ...
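The reason required matters here: an unchecked checkbox submits no value at all, and a required BooleanField then fails validation even though False is a perfectly valid answer. A standalone sketch of that behaviour with a plain Django form (not the project's formset):

```python
# Standalone sketch of why required=False is needed for checkbox-backed BooleanFields.
import django
from django.conf import settings

settings.configure()
django.setup()

from django import forms

class WordForm(forms.Form):
    is_local = forms.BooleanField(label="Local", required=False)

form = WordForm(data={})             # unchecked checkbox: the key is simply absent
assert form.is_valid()               # with required=True this would fail validation
assert form.cleaned_data["is_local"] is False
```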
40c7d72ca275680754e38fd8c926ee364d007198
src/test/java/com/vtence/molecule/BodyPartTest.java
src/test/java/com/vtence/molecule/BodyPartTest.java
package com.vtence.molecule; import org.junit.Test; import java.io.IOException; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; public class BodyPartTest { @Test public void decodesTextContentAccordingToContentTypeCharset() throws IOException { String originalText = "Les naïfs ægithales hâtifs pondant à Noël où il gèle..."; BodyPart part = new BodyPart().contentType("text/plain; charset=utf-16").content(originalText); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoContentTypeIsSpecified() throws IOException { String originalText = "sont sûrs d'être déçus..."; BodyPart part = new BodyPart().content(originalText); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoCharsetIsSpecified() throws IOException { String originalText = "en voyant leurs drôles d'oeufs abîmés."; BodyPart part = new BodyPart().contentType("text/plain").content(originalText); assertThat("decoded text", part.content(), equalTo(originalText)); } }
package com.vtence.molecule; import org.junit.Test; import java.io.IOException; import static com.vtence.molecule.helpers.Charsets.UTF_16; import static com.vtence.molecule.helpers.Charsets.UTF_8; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; public class BodyPartTest { @Test public void decodesTextContentAccordingToContentTypeCharset() throws IOException { String originalText = "Les naïfs ægithales hâtifs pondant à Noël où il gèle..."; BodyPart part = new BodyPart().contentType("text/plain; charset=utf-16").content(originalText.getBytes(UTF_16)); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoContentTypeIsSpecified() throws IOException { String originalText = "sont sûrs d'être déçus..."; BodyPart part = new BodyPart().content(originalText.getBytes(UTF_8)); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoCharsetIsSpecified() throws IOException { String originalText = "en voyant leurs drôles d'oeufs abîmés."; BodyPart part = new BodyPart().contentType("text/plain").content(originalText.getBytes(UTF_8)); assertThat("decoded text", part.content(), equalTo(originalText)); } }
Make sure our body part test actually proves we're using the right encoding
Make sure our body part test actually proves we're using the right encoding
Java
mit
ensonik/molecule,ensonik/molecule,testinfected/molecule,ensonik/molecule,testinfected/molecule,testinfected/molecule,ensonik/molecule,testinfected/molecule,ensonik/molecule,testinfected/molecule
java
## Code Before: package com.vtence.molecule; import org.junit.Test; import java.io.IOException; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; public class BodyPartTest { @Test public void decodesTextContentAccordingToContentTypeCharset() throws IOException { String originalText = "Les naïfs ægithales hâtifs pondant à Noël où il gèle..."; BodyPart part = new BodyPart().contentType("text/plain; charset=utf-16").content(originalText); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoContentTypeIsSpecified() throws IOException { String originalText = "sont sûrs d'être déçus..."; BodyPart part = new BodyPart().content(originalText); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoCharsetIsSpecified() throws IOException { String originalText = "en voyant leurs drôles d'oeufs abîmés."; BodyPart part = new BodyPart().contentType("text/plain").content(originalText); assertThat("decoded text", part.content(), equalTo(originalText)); } } ## Instruction: Make sure our body part test actually proves we're using the right encoding ## Code After: package com.vtence.molecule; import org.junit.Test; import java.io.IOException; import static com.vtence.molecule.helpers.Charsets.UTF_16; import static com.vtence.molecule.helpers.Charsets.UTF_8; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; public class BodyPartTest { @Test public void decodesTextContentAccordingToContentTypeCharset() throws IOException { String originalText = "Les naïfs ægithales hâtifs pondant à Noël où il gèle..."; BodyPart part = new BodyPart().contentType("text/plain; charset=utf-16").content(originalText.getBytes(UTF_16)); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoContentTypeIsSpecified() throws IOException { String originalText = "sont sûrs d'être déçus..."; BodyPart part = new BodyPart().content(originalText.getBytes(UTF_8)); assertThat("decoded text", part.content(), equalTo(originalText)); } @Test public void defaultsToUTF8DecodingWhenNoCharsetIsSpecified() throws IOException { String originalText = "en voyant leurs drôles d'oeufs abîmés."; BodyPart part = new BodyPart().contentType("text/plain").content(originalText.getBytes(UTF_8)); assertThat("decoded text", part.content(), equalTo(originalText)); } }
// ... existing code ... import java.io.IOException; import static com.vtence.molecule.helpers.Charsets.UTF_16; import static com.vtence.molecule.helpers.Charsets.UTF_8; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; // ... modified code ... public void decodesTextContentAccordingToContentTypeCharset() throws IOException { String originalText = "Les naïfs ægithales hâtifs pondant à Noël où il gèle..."; BodyPart part = new BodyPart().contentType("text/plain; charset=utf-16").content(originalText.getBytes(UTF_16)); assertThat("decoded text", part.content(), equalTo(originalText)); } ... public void defaultsToUTF8DecodingWhenNoContentTypeIsSpecified() throws IOException { String originalText = "sont sûrs d'être déçus..."; BodyPart part = new BodyPart().content(originalText.getBytes(UTF_8)); assertThat("decoded text", part.content(), equalTo(originalText)); } ... public void defaultsToUTF8DecodingWhenNoCharsetIsSpecified() throws IOException { String originalText = "en voyant leurs drôles d'oeufs abîmés."; BodyPart part = new BodyPart().contentType("text/plain").content(originalText.getBytes(UTF_8)); assertThat("decoded text", part.content(), equalTo(originalText)); } // ... rest of the code ...
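The updated test feeds the part raw bytes in the declared charset, so the decode path is genuinely exercised. The same round trip in plain Python; the charset_of() parser below is a toy and only illustrative:

```python
# Plain-Python illustration of decoding per the declared Content-Type charset.
def charset_of(content_type, default="utf-8"):
    # Toy parser; real code should use a proper header parser.
    for param in content_type.split(";")[1:]:
        name, _, value = param.strip().partition("=")
        if name.lower() == "charset":
            return value
    return default

original = "Les naïfs ægithales hâtifs pondant à Noël où il gèle..."
raw = original.encode("utf-16")

assert raw.decode(charset_of("text/plain; charset=utf-16")) == original
```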
2ba603b8316486f1a562e9ce77aa0efb5d1cd96d
src/org/rust/lang/structure/RustStructDeclFieldTreeElement.kt
src/org/rust/lang/structure/RustStructDeclFieldTreeElement.kt
package org.rust.lang.structure import com.intellij.ide.structureView.StructureViewTreeElement import com.intellij.ide.structureView.impl.common.PsiTreeElementBase import com.intellij.psi.NavigatablePsiElement import org.rust.lang.core.psi.RustStructDeclField class RustStructDeclFieldTreeElement(element: RustStructDeclField) : PsiTreeElementBase<RustStructDeclField>(element) { override fun getPresentableText(): String? { return element?.identifier?.text } override fun getChildrenBase(): Collection<StructureViewTreeElement> { return arrayListOf<StructureViewTreeElement>() } }
package org.rust.lang.structure import com.intellij.ide.structureView.StructureViewTreeElement import com.intellij.ide.structureView.impl.common.PsiTreeElementBase import com.intellij.psi.NavigatablePsiElement import org.rust.lang.core.psi.RustStructDeclField class RustStructDeclFieldTreeElement(element: RustStructDeclField) : PsiTreeElementBase<RustStructDeclField>(element) { override fun getPresentableText(): String? { return "${element?.identifier?.text}: ${element?.typeSum?.text}" } override fun getChildrenBase(): Collection<StructureViewTreeElement> { return arrayListOf<StructureViewTreeElement>() } }
Structure View: Include type info for struct fields
Structure View: Include type info for struct fields
Kotlin
mit
anton-okolelov/intellij-rust,alygin/intellij-rust,Undin/intellij-rust,intellij-rust/intellij-rust,himikof/intellij-rust,Undin/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,Turbo87/intellij-rust,intellij-rust/intellij-rust,d9n/intellij-rust,anton-okolelov/intellij-rust,alygin/intellij-rust,ujpv/intellij-rust,intellij-rust/intellij-rust,Turbo87/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,alygin/intellij-rust,Turbo87/intellij-rust,ThoseGrapefruits/intellij-rust,ujpv/intellij-rust,Turbo87/intellij-rust,ujpv/intellij-rust,alygin/intellij-rust,himikof/intellij-rust,anton-okolelov/intellij-rust,Undin/intellij-rust,alygin/intellij-rust,himikof/intellij-rust,d9n/intellij-rust,himikof/intellij-rust,intellij-rust/intellij-rust,ThoseGrapefruits/intellij-rust,ThoseGrapefruits/intellij-rust,Undin/intellij-rust,anton-okolelov/intellij-rust,ujpv/intellij-rust,ujpv/intellij-rust,d9n/intellij-rust,himikof/intellij-rust,anton-okolelov/intellij-rust,d9n/intellij-rust,d9n/intellij-rust,ThoseGrapefruits/intellij-rust
kotlin
## Code Before: package org.rust.lang.structure import com.intellij.ide.structureView.StructureViewTreeElement import com.intellij.ide.structureView.impl.common.PsiTreeElementBase import com.intellij.psi.NavigatablePsiElement import org.rust.lang.core.psi.RustStructDeclField class RustStructDeclFieldTreeElement(element: RustStructDeclField) : PsiTreeElementBase<RustStructDeclField>(element) { override fun getPresentableText(): String? { return element?.identifier?.text } override fun getChildrenBase(): Collection<StructureViewTreeElement> { return arrayListOf<StructureViewTreeElement>() } } ## Instruction: Structure View: Include type info for struct fields ## Code After: package org.rust.lang.structure import com.intellij.ide.structureView.StructureViewTreeElement import com.intellij.ide.structureView.impl.common.PsiTreeElementBase import com.intellij.psi.NavigatablePsiElement import org.rust.lang.core.psi.RustStructDeclField class RustStructDeclFieldTreeElement(element: RustStructDeclField) : PsiTreeElementBase<RustStructDeclField>(element) { override fun getPresentableText(): String? { return "${element?.identifier?.text}: ${element?.typeSum?.text}" } override fun getChildrenBase(): Collection<StructureViewTreeElement> { return arrayListOf<StructureViewTreeElement>() } }
# ... existing code ... class RustStructDeclFieldTreeElement(element: RustStructDeclField) : PsiTreeElementBase<RustStructDeclField>(element) { override fun getPresentableText(): String? { return "${element?.identifier?.text}: ${element?.typeSum?.text}" } override fun getChildrenBase(): Collection<StructureViewTreeElement> { # ... rest of the code ...
f31c8690d3b28b770377be148a102d96f73272d5
src/foam/core/MultitonInfo.java
src/foam/core/MultitonInfo.java
/** * @license * Copyright 2017 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.core; import java.util.Map; import java.util.HashMap; public class MultitonInfo<T> implements Axiom, ContextFactory<T> { Map<Object, T> instanceMap = new HashMap<Object, T>(); String name; PropertyInfo p; public MultitonInfo(String name, PropertyInfo p) { this.name = name; this.p = p; } public String getName() { return name; } public T getInstance(Map<String, Object> args, X x) { Object key = args.get(p.getName()); if ( ! instanceMap.containsKey(key) ) { try { Class<T> type = (Class<T>)p.getClassInfo().getObjClass(); T obj = type.newInstance(); ((ContextAware)obj).setX(x); for (Map.Entry<String, Object> entry : args.entrySet()) { ((FObject)obj).setProperty(entry.getKey(), entry.getValue()); } instanceMap.put(key, obj); } catch (java.lang.Exception e) { e.printStackTrace(); return null; } } return instanceMap.get(key); } }
/** * @license * Copyright 2017 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.core; import java.util.Map; import java.util.HashMap; public class MultitonInfo<T> implements Axiom, ContextFactory<T> { Map<Object, T> instanceMap = new HashMap<Object, T>(); String name; PropertyInfo p; public MultitonInfo(String name, PropertyInfo p) { this.name = name; this.p = p; } public String getName() { return name; } public synchronized T getInstance(Map<String, Object> args, X x) { Object key = args.get(p.getName()); if ( ! instanceMap.containsKey(key) ) { try { Class<T> type = (Class<T>)p.getClassInfo().getObjClass(); T obj = type.newInstance(); ((ContextAware)obj).setX(x); for ( Map.Entry<String, Object> entry : args.entrySet() ) { ((FObject)obj).setProperty(entry.getKey(), entry.getValue()); } instanceMap.put(key, obj); } catch (java.lang.Exception e) { e.printStackTrace(); return null; } } return instanceMap.get(key); } }
Make multiton creation thread safe and fix some code style.
Make multiton creation thread safe and fix some code style.
Java
apache-2.0
foam-framework/foam2,jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2
java
## Code Before: /** * @license * Copyright 2017 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.core; import java.util.Map; import java.util.HashMap; public class MultitonInfo<T> implements Axiom, ContextFactory<T> { Map<Object, T> instanceMap = new HashMap<Object, T>(); String name; PropertyInfo p; public MultitonInfo(String name, PropertyInfo p) { this.name = name; this.p = p; } public String getName() { return name; } public T getInstance(Map<String, Object> args, X x) { Object key = args.get(p.getName()); if ( ! instanceMap.containsKey(key) ) { try { Class<T> type = (Class<T>)p.getClassInfo().getObjClass(); T obj = type.newInstance(); ((ContextAware)obj).setX(x); for (Map.Entry<String, Object> entry : args.entrySet()) { ((FObject)obj).setProperty(entry.getKey(), entry.getValue()); } instanceMap.put(key, obj); } catch (java.lang.Exception e) { e.printStackTrace(); return null; } } return instanceMap.get(key); } } ## Instruction: Make multiton creation thread safe and fix some code style. ## Code After: /** * @license * Copyright 2017 The FOAM Authors. All Rights Reserved. * http://www.apache.org/licenses/LICENSE-2.0 */ package foam.core; import java.util.Map; import java.util.HashMap; public class MultitonInfo<T> implements Axiom, ContextFactory<T> { Map<Object, T> instanceMap = new HashMap<Object, T>(); String name; PropertyInfo p; public MultitonInfo(String name, PropertyInfo p) { this.name = name; this.p = p; } public String getName() { return name; } public synchronized T getInstance(Map<String, Object> args, X x) { Object key = args.get(p.getName()); if ( ! instanceMap.containsKey(key) ) { try { Class<T> type = (Class<T>)p.getClassInfo().getObjClass(); T obj = type.newInstance(); ((ContextAware)obj).setX(x); for ( Map.Entry<String, Object> entry : args.entrySet() ) { ((FObject)obj).setProperty(entry.getKey(), entry.getValue()); } instanceMap.put(key, obj); } catch (java.lang.Exception e) { e.printStackTrace(); return null; } } return instanceMap.get(key); } }
... return name; } public synchronized T getInstance(Map<String, Object> args, X x) { Object key = args.get(p.getName()); if ( ! instanceMap.containsKey(key) ) { try { ... Class<T> type = (Class<T>)p.getClassInfo().getObjClass(); T obj = type.newInstance(); ((ContextAware)obj).setX(x); for ( Map.Entry<String, Object> entry : args.entrySet() ) { ((FObject)obj).setProperty(entry.getKey(), entry.getValue()); } instanceMap.put(key, obj); ...
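The race that synchronized closes is two threads both missing the map and constructing duplicate instances for the same key. A Python sketch of the same lock-guarded multiton pattern, not a port of FOAM's MultitonInfo:

```python
# Python sketch of a lock-guarded multiton: one shared instance per key.
import threading

class Multiton:
    def __init__(self, factory):
        self._factory = factory
        self._instances = {}
        self._lock = threading.Lock()

    def get_instance(self, key, *args, **kwargs):
        with self._lock:                       # plays the role of `synchronized`
            if key not in self._instances:
                self._instances[key] = self._factory(*args, **kwargs)
            return self._instances[key]

registry = Multiton(dict)
a = registry.get_instance("config", region="us")
b = registry.get_instance("config", region="eu")   # ignored: instance already exists
assert a is b and a == {"region": "us"}
```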
b12e44fe00403fc1b98eebec47a8c54d6e2a8a13
src/main/java/com/l2fprod/common/beans/editor/EnumerationPropertyEditor.java
src/main/java/com/l2fprod/common/beans/editor/EnumerationPropertyEditor.java
/* * Copyright 2015 Matthew Aguirre * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.l2fprod.common.beans.editor; import com.l2fprod.common.annotations.EditorRegistry; import javax.swing.JComboBox; /** * * @author matta */ @EditorRegistry(type = Enum.class) public class EnumerationPropertyEditor extends ComboBoxPropertyEditor { private Class<?> baseType; public EnumerationPropertyEditor() { super(); } @Override public Object getValue() { JComboBox e = (JComboBox) editor; return e.getSelectedItem(); } @Override public void setValue(Object value) { if (this.baseType == null) { this.baseType = value.getClass(); setAvailableValues(baseType.getEnumConstants()); } JComboBox e = (JComboBox) editor; for (int ii = 0; ii < e.getItemCount(); ii++) { if (value != null && e.getItemAt(ii).equals(value.getClass().getCanonicalName())) { e.setSelectedIndex(ii); break; } } } }
/* * Copyright 2015 Matthew Aguirre * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.l2fprod.common.beans.editor; import com.l2fprod.common.annotations.EditorRegistry; import javax.swing.JComboBox; /** * * @author matta */ @EditorRegistry(type = Enum.class) public class EnumerationPropertyEditor extends ComboBoxPropertyEditor { private Class<?> baseType; public EnumerationPropertyEditor() { super(); } @Override public Object getValue() { JComboBox e = (JComboBox) editor; return e.getSelectedItem(); } @Override public void setValue(Object value) { if (this.baseType == null) { this.baseType = value.getClass(); setAvailableValues(baseType.getEnumConstants()); } JComboBox e = (JComboBox) editor; e.getModel().setSelectedItem(value); } }
Fix item selection in enum property editor.
Fix item selection in enum property editor.
Java
apache-2.0
ZenHarbinger/l2fprod-properties-editor
java
## Code Before: /* * Copyright 2015 Matthew Aguirre * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.l2fprod.common.beans.editor; import com.l2fprod.common.annotations.EditorRegistry; import javax.swing.JComboBox; /** * * @author matta */ @EditorRegistry(type = Enum.class) public class EnumerationPropertyEditor extends ComboBoxPropertyEditor { private Class<?> baseType; public EnumerationPropertyEditor() { super(); } @Override public Object getValue() { JComboBox e = (JComboBox) editor; return e.getSelectedItem(); } @Override public void setValue(Object value) { if (this.baseType == null) { this.baseType = value.getClass(); setAvailableValues(baseType.getEnumConstants()); } JComboBox e = (JComboBox) editor; for (int ii = 0; ii < e.getItemCount(); ii++) { if (value != null && e.getItemAt(ii).equals(value.getClass().getCanonicalName())) { e.setSelectedIndex(ii); break; } } } } ## Instruction: Fix item selection in enum property editor. ## Code After: /* * Copyright 2015 Matthew Aguirre * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.l2fprod.common.beans.editor; import com.l2fprod.common.annotations.EditorRegistry; import javax.swing.JComboBox; /** * * @author matta */ @EditorRegistry(type = Enum.class) public class EnumerationPropertyEditor extends ComboBoxPropertyEditor { private Class<?> baseType; public EnumerationPropertyEditor() { super(); } @Override public Object getValue() { JComboBox e = (JComboBox) editor; return e.getSelectedItem(); } @Override public void setValue(Object value) { if (this.baseType == null) { this.baseType = value.getClass(); setAvailableValues(baseType.getEnumConstants()); } JComboBox e = (JComboBox) editor; e.getModel().setSelectedItem(value); } }
// ... existing code ... } JComboBox e = (JComboBox) editor; e.getModel().setSelectedItem(value); } } // ... rest of the code ...
26f0b938bb8619f3ec705ff247c4d671613883fa
django/santropolFeast/member/factories.py
django/santropolFeast/member/factories.py
import factory import datetime import random from django.contrib.auth.models import User from member.models import Member, Address, Contact, Client, PAYMENT_TYPE from member.models import DELIVERY_TYPE, GENDER_CHOICES class AddressFactory (factory.DjangoModelFactory): class Meta: model = Address street = factory.Faker('street_address') city = 'Montreal' postal_code = factory.Faker('postalcode') class ContactFactory (factory.DjangoModelFactory): class Meta: model = Contact type = 'Home phone' value = factory.Faker('phone_number') class MemberFactory (factory.DjangoModelFactory): class Meta: model = Member firstname = factory.Faker('first_name') lastname = factory.Faker('last_name') address = factory.SubFactory(AddressFactory) class ClientFactory (factory.DjangoModelFactory): class Meta: model = Client member = factory.SubFactory(MemberFactory) billing_member = member billing_payment_type = random.choice(PAYMENT_TYPE)[0] rate_type = "default" member = member emergency_contact = factory.SubFactory(MemberFactory) status = random.choice(Client.CLIENT_STATUS)[0] language = "en" alert = factory.Faker('sentence') delivery_type = random.choice(DELIVERY_TYPE)[0] gender = random.choice(GENDER_CHOICES)[0] birthdate = factory.Faker('date')
import factory import datetime import random from django.contrib.auth.models import User from member.models import Member, Address, Contact, Client, PAYMENT_TYPE, Route from member.models import DELIVERY_TYPE, GENDER_CHOICES class AddressFactory (factory.DjangoModelFactory): class Meta: model = Address street = factory.Faker('street_address') city = 'Montreal' postal_code = factory.Faker('postalcode') class ContactFactory (factory.DjangoModelFactory): class Meta: model = Contact type = 'Home phone' value = factory.Faker('phone_number') class MemberFactory (factory.DjangoModelFactory): class Meta: model = Member firstname = factory.Faker('first_name') lastname = factory.Faker('last_name') address = factory.SubFactory(AddressFactory) class ClientFactory (factory.DjangoModelFactory): class Meta: model = Client member = factory.SubFactory(MemberFactory) billing_member = member billing_payment_type = random.choice(PAYMENT_TYPE)[0] rate_type = "default" member = member emergency_contact = factory.SubFactory(MemberFactory) status = random.choice(Client.CLIENT_STATUS)[0] language = "en" alert = factory.Faker('sentence') delivery_type = random.choice(DELIVERY_TYPE)[0] gender = random.choice(GENDER_CHOICES)[0] birthdate = factory.Faker('date') route = random.choice(Route.objects.all())
Add a random <Route> to a generated <Client>
Add a random <Route> to a generated <Client> Issue #214
Python
agpl-3.0
savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,madmath/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/santropol-feast
python
## Code Before: import factory import datetime import random from django.contrib.auth.models import User from member.models import Member, Address, Contact, Client, PAYMENT_TYPE from member.models import DELIVERY_TYPE, GENDER_CHOICES class AddressFactory (factory.DjangoModelFactory): class Meta: model = Address street = factory.Faker('street_address') city = 'Montreal' postal_code = factory.Faker('postalcode') class ContactFactory (factory.DjangoModelFactory): class Meta: model = Contact type = 'Home phone' value = factory.Faker('phone_number') class MemberFactory (factory.DjangoModelFactory): class Meta: model = Member firstname = factory.Faker('first_name') lastname = factory.Faker('last_name') address = factory.SubFactory(AddressFactory) class ClientFactory (factory.DjangoModelFactory): class Meta: model = Client member = factory.SubFactory(MemberFactory) billing_member = member billing_payment_type = random.choice(PAYMENT_TYPE)[0] rate_type = "default" member = member emergency_contact = factory.SubFactory(MemberFactory) status = random.choice(Client.CLIENT_STATUS)[0] language = "en" alert = factory.Faker('sentence') delivery_type = random.choice(DELIVERY_TYPE)[0] gender = random.choice(GENDER_CHOICES)[0] birthdate = factory.Faker('date') ## Instruction: Add a random <Route> to a generated <Client> Issue #214 ## Code After: import factory import datetime import random from django.contrib.auth.models import User from member.models import Member, Address, Contact, Client, PAYMENT_TYPE, Route from member.models import DELIVERY_TYPE, GENDER_CHOICES class AddressFactory (factory.DjangoModelFactory): class Meta: model = Address street = factory.Faker('street_address') city = 'Montreal' postal_code = factory.Faker('postalcode') class ContactFactory (factory.DjangoModelFactory): class Meta: model = Contact type = 'Home phone' value = factory.Faker('phone_number') class MemberFactory (factory.DjangoModelFactory): class Meta: model = Member firstname = factory.Faker('first_name') lastname = factory.Faker('last_name') address = factory.SubFactory(AddressFactory) class ClientFactory (factory.DjangoModelFactory): class Meta: model = Client member = factory.SubFactory(MemberFactory) billing_member = member billing_payment_type = random.choice(PAYMENT_TYPE)[0] rate_type = "default" member = member emergency_contact = factory.SubFactory(MemberFactory) status = random.choice(Client.CLIENT_STATUS)[0] language = "en" alert = factory.Faker('sentence') delivery_type = random.choice(DELIVERY_TYPE)[0] gender = random.choice(GENDER_CHOICES)[0] birthdate = factory.Faker('date') route = random.choice(Route.objects.all())
... import datetime import random from django.contrib.auth.models import User from member.models import Member, Address, Contact, Client, PAYMENT_TYPE, Route from member.models import DELIVERY_TYPE, GENDER_CHOICES ... delivery_type = random.choice(DELIVERY_TYPE)[0] gender = random.choice(GENDER_CHOICES)[0] birthdate = factory.Faker('date') route = random.choice(Route.objects.all()) ...
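One caveat worth flagging: as a plain class attribute, random.choice(Route.objects.all()) is evaluated once at import time, so every generated client would share the same route and the query may run before the database is ready. A hedged alternative using factory_boy's lazy declaration (LazyFunction exists in recent factory_boy releases; the factory name below is made up):

```python
# Deferred variant: the route is picked when each Client instance is built,
# not once when the module is imported.
import random
import factory
from member.models import Client, Route

class LazyRouteClientFactory(factory.DjangoModelFactory):
    class Meta:
        model = Client

    route = factory.LazyFunction(lambda: random.choice(Route.objects.all()))
```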
2158edb92cba6c19fa258f19445191d0308c4153
utils/async_tasks.py
utils/async_tasks.py
from utils.redis_store import store def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. if elapsed_time > refresh_time: task_func.delay(store_key, *task_args, **task_kwargs) return output
from utils.redis_store import store from celery.signals import task_postrun, task_prerun def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. # If run_once=True, we only trigger the recompute if the task is not already running if elapsed_time > refresh_time: if run_once: # Check that it is not already running computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__) if store.get(computing_store_key): # Task is already running, don't trigger running again print('Skip computing data for {0}, already running'.format(store_key)) return output task_func.delay(store_key, *task_args, **task_kwargs) return output @task_prerun.connect() def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None): # Set computing key computing_store_key = 'computing-{0}'.format(task.name) store.set(computing_store_key, {'running': True}) @task_postrun.connect() def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None): # Delete computing key (if present) computing_store_key = 'computing-{0}'.format(task.name) store.delete(computing_store_key)
Add option to run async tasks only one at a time
Add option to run async tasks only one at a time This is implemented with a simple lock-like mechanism using redis.
Python
agpl-3.0
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
python
## Code Before: from utils.redis_store import store def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. if elapsed_time > refresh_time: task_func.delay(store_key, *task_args, **task_kwargs) return output ## Instruction: Add option to run async tasks only on at a time This is implemented with a simple lock like mechanism using redis. ## Code After: from utils.redis_store import store from celery.signals import task_postrun, task_prerun def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. # If run_once=True, we only trigger the recompute if the task is not already running if elapsed_time > refresh_time: if run_once: # Check that it is not already running computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__) if store.get(computing_store_key): # Task is already running, don't trigger running again print('Skip computing data for {0}, already running'.format(store_key)) return output task_func.delay(store_key, *task_args, **task_kwargs) return output @task_prerun.connect() def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None): # Set computing key computing_store_key = 'computing-{0}'.format(task.name) store.set(computing_store_key, {'running': True}) @task_postrun.connect() def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None): # Delete computing key (if present) computing_store_key = 'computing-{0}'.format(task.name) store.delete(computing_store_key)
// ... existing code ... from utils.redis_store import store from celery.signals import task_postrun, task_prerun def data_from_async_task(task_func, task_args, task_kwargs, store_key, refresh_time=60, run_once=True): # Get task results previously stored in store output, elapsed_time = store.get(store_key, include_elapsed_time=True) // ... modified code ... # If there are no previously stored results (elapsed_time will be a magically big number) or # if the previously stored results are older than refresh_time, then we trigger recompute of the # task so that results are ready for next load. # If run_once=True, we only trigger the recompute if the task is not already running if elapsed_time > refresh_time: if run_once: # Check that it is not already running computing_store_key = 'computing-{0}.{1}'.format(task_func.__module__, task_func.__name__) if store.get(computing_store_key): # Task is already running, don't trigger running again print('Skip computing data for {0}, already running'.format(store_key)) return output task_func.delay(store_key, *task_args, **task_kwargs) return output @task_prerun.connect() def task_prerun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None): # Set computing key computing_store_key = 'computing-{0}'.format(task.name) store.set(computing_store_key, {'running': True}) @task_postrun.connect() def task_postrun(signal=None, sender=None, task_id=None, task=None, args=None, kwargs=None, retval=None, state=None): # Delete computing key (if present) computing_store_key = 'computing-{0}'.format(task.name) store.delete(computing_store_key) // ... rest of the code ...
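The run-once guard in this record checks a "computing-<task>" key in the shared store before scheduling and relies on celery's task_prerun/task_postrun signals to set and clear it. A standalone sketch of the same lock-like idea, written directly against redis-py for this card, is below; the key prefix, the ten-minute TTL and the `do_work` callable are illustrative assumptions.

```python
import redis

r = redis.Redis()


def trigger_once(task_name, do_work, lock_ttl=600):
    """Run do_work only if no other worker currently holds this task's lock."""
    lock_key = f"computing-{task_name}"
    # SET with nx=True is atomic: only one caller can create the key.
    if not r.set(lock_key, "1", nx=True, ex=lock_ttl):
        print(f"Skip {task_name}, already running")
        return False
    try:
        do_work()
    finally:
        r.delete(lock_key)
    return True
```

Using SET with nx=True makes the check-and-acquire step atomic, which closes the small race left by reading the key first and writing it later; the TTL also keeps a crashed worker from holding the lock forever.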
67c6e446503055f3b29f2ecd8269b09f5743ab85
insts/jg/FREEVERB/denormals.h
insts/jg/FREEVERB/denormals.h
// Macro for killing denormalled numbers // // Written by Jezar at Dreampoint, June 2000 // http://www.dreampoint.co.uk // Based on IS_DENORMAL macro by Jon Watte // This code is public domain #ifndef _denormals_ #define _denormals_ #if 0 #define undenormalise(sample) if(((*(unsigned int*)&sample)&0x7f800000)==0) sample=0.0f #else #define undenormalise(sample) #endif #endif//_denormals_ //ends
// Macro for killing denormalled numbers // // Written by Jezar at Dreampoint, June 2000 // http://www.dreampoint.co.uk // Based on IS_DENORMAL macro by Jon Watte // This code is public domain #ifndef _denormals_ #define _denormals_ #if defined(i386) #ifdef NOMORE // original code doesn't work on recent gcc compilers #define undenormalise(sample) \ if (((*(unsigned int*)&sample)&0x7f800000) == 0) sample = 0.0f #else // !NOMORE // see <ccrma-mail.stanford.edu/pipermail/planetccrma/2005-January/007868.html> static inline float undenormalise(volatile float s) { s += 9.8607615E-32f; return s - 9.8607615E-32f; } //#define undenormalise(sample) #endif // !NOMORE #else // !defined(i386) #define undenormalise(sample) // nothing #endif // !defined(i386) #endif//_denormals_ //ends
Replace denormalise macro, broken on recent gcc's.
Replace denormalise macro, broken on recent gcc's.
C
apache-2.0
RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix,RTcmix/RTcmix
c
## Code Before: // Macro for killing denormalled numbers // // Written by Jezar at Dreampoint, June 2000 // http://www.dreampoint.co.uk // Based on IS_DENORMAL macro by Jon Watte // This code is public domain #ifndef _denormals_ #define _denormals_ #if 0 #define undenormalise(sample) if(((*(unsigned int*)&sample)&0x7f800000)==0) sample=0.0f #else #define undenormalise(sample) #endif #endif//_denormals_ //ends ## Instruction: Replace denormalise macro, broken on recent gcc's. ## Code After: // Macro for killing denormalled numbers // // Written by Jezar at Dreampoint, June 2000 // http://www.dreampoint.co.uk // Based on IS_DENORMAL macro by Jon Watte // This code is public domain #ifndef _denormals_ #define _denormals_ #if defined(i386) #ifdef NOMORE // original code doesn't work on recent gcc compilers #define undenormalise(sample) \ if (((*(unsigned int*)&sample)&0x7f800000) == 0) sample = 0.0f #else // !NOMORE // see <ccrma-mail.stanford.edu/pipermail/planetccrma/2005-January/007868.html> static inline float undenormalise(volatile float s) { s += 9.8607615E-32f; return s - 9.8607615E-32f; } //#define undenormalise(sample) #endif // !NOMORE #else // !defined(i386) #define undenormalise(sample) // nothing #endif // !defined(i386) #endif//_denormals_ //ends
... #ifndef _denormals_ #define _denormals_ #if defined(i386) #ifdef NOMORE // original code doesn't work on recent gcc compilers #define undenormalise(sample) \ if (((*(unsigned int*)&sample)&0x7f800000) == 0) sample = 0.0f #else // !NOMORE // see <ccrma-mail.stanford.edu/pipermail/planetccrma/2005-January/007868.html> static inline float undenormalise(volatile float s) { s += 9.8607615E-32f; return s - 9.8607615E-32f; } //#define undenormalise(sample) #endif // !NOMORE #else // !defined(i386) #define undenormalise(sample) // nothing #endif // !defined(i386) #endif//_denormals_ ...
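The working branch of the new macro flushes denormals by adding and then subtracting a small normal constant (roughly 2^-103): a subnormal input is far below one ulp of that constant, so it rounds away and the subtraction leaves zero, while ordinary values pass through unchanged. The snippet below reproduces the effect in NumPy purely as an illustration for this card; only the constant is taken from the record.

```python
import numpy as np

OFFSET = np.float32(9.8607615e-32)  # constant used in the record, about 2**-103


def undenormalise(x):
    """Add/subtract a small normal float32; subnormal inputs round away to 0."""
    return (np.float32(x) + OFFSET) - OFFSET


denormal = np.float32(1e-40)                 # below the float32 normal range (~1.18e-38)
print(denormal, undenormalise(denormal))     # 1e-40 -> 0.0
print(undenormalise(np.float32(0.5)))        # normal values come back unchanged
```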
6bcf987ac927c4cd9829b55ec2521d77fcc2c3ad
examples/test_mfa_login.py
examples/test_mfa_login.py
from seleniumbase import BaseCase class TestMFALogin(BaseCase): def test_mfa_login(self): self.open("https://seleniumbase.io/realworld/login") self.type("#username", "demo_user") self.type("#password", "secret_pass") self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG") self.highlight("img#image1") self.assert_text("Welcome!", "h1") self.save_screenshot_to_logs()
from seleniumbase import BaseCase class TestMFALogin(BaseCase): def test_mfa_login(self): self.open("https://seleniumbase.io/realworld/login") self.type("#username", "demo_user") self.type("#password", "secret_pass") self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG") self.highlight("img#image1") self.click('a:contains("This Page")') self.assert_text("Welcome!", "h1") self.save_screenshot_to_logs()
Add a click() call to an example test
Add a click() call to an example test
Python
mit
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
python
## Code Before: from seleniumbase import BaseCase class TestMFALogin(BaseCase): def test_mfa_login(self): self.open("https://seleniumbase.io/realworld/login") self.type("#username", "demo_user") self.type("#password", "secret_pass") self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG") self.highlight("img#image1") self.assert_text("Welcome!", "h1") self.save_screenshot_to_logs() ## Instruction: Add a click() call to an example test ## Code After: from seleniumbase import BaseCase class TestMFALogin(BaseCase): def test_mfa_login(self): self.open("https://seleniumbase.io/realworld/login") self.type("#username", "demo_user") self.type("#password", "secret_pass") self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG") self.highlight("img#image1") self.click('a:contains("This Page")') self.assert_text("Welcome!", "h1") self.save_screenshot_to_logs()
// ... existing code ... self.type("#password", "secret_pass") self.enter_mfa_code("#totpcode", "GAXG2MTEOR3DMMDG") self.highlight("img#image1") self.click('a:contains("This Page")') self.assert_text("Welcome!", "h1") self.save_screenshot_to_logs() // ... rest of the code ...
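The new `enter_mfa_code` call above types a time-based one-time password derived from the base32 seed "GAXG2MTEOR3DMMDG". For readers curious what that helper must compute, here is a hedged sketch using the pyotp library; pyotp is an assumption of this card (the record only shows the SeleniumBase API), while the secret is the one from the test.

```python
import pyotp

SECRET = "GAXG2MTEOR3DMMDG"  # base32 TOTP seed from the example test

totp = pyotp.TOTP(SECRET)
code = totp.now()            # six-digit code for the current 30-second window
print(code)

# The same object can verify a code typed by a user, allowing a little clock drift.
print(totp.verify(code, valid_window=1))
```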
0a5e0935782bdd4c8669a39566d619aa4816ab60
custom/aaa/utils.py
custom/aaa/utils.py
from __future__ import absolute_import from __future__ import unicode_literals from corehq.apps.locations.models import LocationType, SQLLocation def build_location_filters(location_id): try: location = SQLLocation.objects.get(location_id=location_id) except SQLLocation.DoesNotExist: return {'state_id': 'ALL'} location_ancestors = location.get_ancestors(include_self=True) filters = { "{}_id".format(ancestor.location_type.code): ancestor.location_id for ancestor in location_ancestors } location_type = location.location_type child_location_type = LocationType.objects.filter(domain=location_type.domain, parent_type=location_type) filters["{}_id".format(child_location_type.code)] = 'All' return filters
from __future__ import absolute_import from __future__ import unicode_literals from django.db import connections from corehq.apps.locations.models import LocationType, SQLLocation from custom.aaa.models import AggAwc, AggVillage, CcsRecord, Child, Woman def build_location_filters(location_id): try: location = SQLLocation.objects.get(location_id=location_id) except SQLLocation.DoesNotExist: return {'state_id': 'ALL'} location_ancestors = location.get_ancestors(include_self=True) filters = { "{}_id".format(ancestor.location_type.code): ancestor.location_id for ancestor in location_ancestors } location_type = location.location_type child_location_type = LocationType.objects.filter(domain=location_type.domain, parent_type=location_type) filters["{}_id".format(child_location_type.code)] = 'All' return filters def explain_aggregation_queries(domain, window_start, window_end): queries = {} for cls in (AggAwc, AggVillage, CcsRecord, Child, Woman): for agg_query in cls.aggregation_queries: explanation = _explain_query(cls, agg_query, domain, window_start, window_end) queries[explanation[0]] = explanation[1] return queries def _explain_query(cls, method, domain, window_start, window_end): agg_query, agg_params = method(domain, window_start, window_end) with connections['aaa-data'].cursor() as cursor: cursor.execute('explain ' + agg_query, agg_params) return cls.__name__ + method.__name__, cursor.fetchall()
Create easy explanations for aggregation queries
Create easy explanations for aggregation queries
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
python
## Code Before: from __future__ import absolute_import from __future__ import unicode_literals from corehq.apps.locations.models import LocationType, SQLLocation def build_location_filters(location_id): try: location = SQLLocation.objects.get(location_id=location_id) except SQLLocation.DoesNotExist: return {'state_id': 'ALL'} location_ancestors = location.get_ancestors(include_self=True) filters = { "{}_id".format(ancestor.location_type.code): ancestor.location_id for ancestor in location_ancestors } location_type = location.location_type child_location_type = LocationType.objects.filter(domain=location_type.domain, parent_type=location_type) filters["{}_id".format(child_location_type.code)] = 'All' return filters ## Instruction: Create easy explanations for aggregation queries ## Code After: from __future__ import absolute_import from __future__ import unicode_literals from django.db import connections from corehq.apps.locations.models import LocationType, SQLLocation from custom.aaa.models import AggAwc, AggVillage, CcsRecord, Child, Woman def build_location_filters(location_id): try: location = SQLLocation.objects.get(location_id=location_id) except SQLLocation.DoesNotExist: return {'state_id': 'ALL'} location_ancestors = location.get_ancestors(include_self=True) filters = { "{}_id".format(ancestor.location_type.code): ancestor.location_id for ancestor in location_ancestors } location_type = location.location_type child_location_type = LocationType.objects.filter(domain=location_type.domain, parent_type=location_type) filters["{}_id".format(child_location_type.code)] = 'All' return filters def explain_aggregation_queries(domain, window_start, window_end): queries = {} for cls in (AggAwc, AggVillage, CcsRecord, Child, Woman): for agg_query in cls.aggregation_queries: explanation = _explain_query(cls, agg_query, domain, window_start, window_end) queries[explanation[0]] = explanation[1] return queries def _explain_query(cls, method, domain, window_start, window_end): agg_query, agg_params = method(domain, window_start, window_end) with connections['aaa-data'].cursor() as cursor: cursor.execute('explain ' + agg_query, agg_params) return cls.__name__ + method.__name__, cursor.fetchall()
# ... existing code ... from __future__ import absolute_import from __future__ import unicode_literals from django.db import connections from corehq.apps.locations.models import LocationType, SQLLocation from custom.aaa.models import AggAwc, AggVillage, CcsRecord, Child, Woman def build_location_filters(location_id): # ... modified code ... filters["{}_id".format(child_location_type.code)] = 'All' return filters def explain_aggregation_queries(domain, window_start, window_end): queries = {} for cls in (AggAwc, AggVillage, CcsRecord, Child, Woman): for agg_query in cls.aggregation_queries: explanation = _explain_query(cls, agg_query, domain, window_start, window_end) queries[explanation[0]] = explanation[1] return queries def _explain_query(cls, method, domain, window_start, window_end): agg_query, agg_params = method(domain, window_start, window_end) with connections['aaa-data'].cursor() as cursor: cursor.execute('explain ' + agg_query, agg_params) return cls.__name__ + method.__name__, cursor.fetchall() # ... rest of the code ...
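`_explain_query` above simply prepends EXPLAIN to each aggregation query and runs it on the "aaa-data" connection. The helper below generalises that pattern to any Django database alias; it is a sketch written for this card, and the alias "default", the `analyze` flag and the commented sample SQL are assumptions rather than project code.

```python
from django.db import connections


def explain(sql, params=None, alias="default", analyze=False):
    """Return the database planner's output for `sql` as a list of text rows."""
    prefix = "EXPLAIN ANALYZE " if analyze else "EXPLAIN "
    with connections[alias].cursor() as cursor:
        cursor.execute(prefix + sql, params or [])
        return [row[0] for row in cursor.fetchall()]


# Example with an assumed table name:
# for line in explain("SELECT * FROM aaa_child WHERE domain = %s", ["test"]):
#     print(line)
```

Note that EXPLAIN ANALYZE actually executes the statement, so it should only be pointed at queries that are safe to run.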
edb905aec44e3fb2086ae87df960597e7b4c8356
scoring/machinelearning/neuralnetwork.py
scoring/machinelearning/neuralnetwork.py
from ffnet import ffnet,mlgraph,tmlgraph import numpy as np class neuralnetwork: def __init__(self, shape, loadnet=None, full_conn=True, biases=False): """ shape: shape of a NN given as a tuple """ if loadnet: self.model = ffnet() self.model.load(loadnet) else: if full_conn: conec = tmlgraph(shape, biases) else: conec = mlgraph(shapebiases) self.model = ffnet(conec) def fit(self, input_descriptors, target_values, train_alg='tnc'): getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000) def predict(self, input_descriptors): return np.array(self.model.call(input_descriptors)).flatten()
from ffnet import ffnet,mlgraph,tmlgraph import numpy as np from scipy.stats import linregress class neuralnetwork: def __init__(self, shape, loadnet=None, full_conn=True, biases=False): """ shape: shape of a NN given as a tuple """ if loadnet: self.model = ffnet() self.model.load(loadnet) else: if full_conn: conec = tmlgraph(shape, biases) else: conec = mlgraph(shapebiases) self.model = ffnet(conec) def fit(self, input_descriptors, target_values, train_alg='tnc'): getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000) def predict(self, input_descriptors): return np.array(self.model.call(input_descriptors)) def score(X, y): return linregress(self.predict(X), y)[2]**2
Add missing methods to NN class
Add missing methods to NN class
Python
bsd-3-clause
mwojcikowski/opendrugdiscovery
python
## Code Before: from ffnet import ffnet,mlgraph,tmlgraph import numpy as np class neuralnetwork: def __init__(self, shape, loadnet=None, full_conn=True, biases=False): """ shape: shape of a NN given as a tuple """ if loadnet: self.model = ffnet() self.model.load(loadnet) else: if full_conn: conec = tmlgraph(shape, biases) else: conec = mlgraph(shapebiases) self.model = ffnet(conec) def fit(self, input_descriptors, target_values, train_alg='tnc'): getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000) def predict(self, input_descriptors): return np.array(self.model.call(input_descriptors)).flatten() ## Instruction: Add missing methods to NN class ## Code After: from ffnet import ffnet,mlgraph,tmlgraph import numpy as np from scipy.stats import linregress class neuralnetwork: def __init__(self, shape, loadnet=None, full_conn=True, biases=False): """ shape: shape of a NN given as a tuple """ if loadnet: self.model = ffnet() self.model.load(loadnet) else: if full_conn: conec = tmlgraph(shape, biases) else: conec = mlgraph(shapebiases) self.model = ffnet(conec) def fit(self, input_descriptors, target_values, train_alg='tnc'): getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000) def predict(self, input_descriptors): return np.array(self.model.call(input_descriptors)) def score(X, y): return linregress(self.predict(X), y)[2]**2
// ... existing code ... from ffnet import ffnet,mlgraph,tmlgraph import numpy as np from scipy.stats import linregress class neuralnetwork: def __init__(self, shape, loadnet=None, full_conn=True, biases=False): // ... modified code ... getattr(self.model, 'train_'+train_alg)(input_descriptors, target_values, maxfun=10000) def predict(self, input_descriptors): return np.array(self.model.call(input_descriptors)) def score(X, y): return linregress(self.predict(X), y)[2]**2 // ... rest of the code ...
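Two details of the recorded "Code After" read like slips rather than intent: `conec = mlgraph(shapebiases)` in the non-fully-connected branch presumably means `mlgraph(shape, biases)`, and `def score(X, y)` is missing `self`, so it would fail when called on an instance. The sketch below shows the scoring idea in a corrected, self-contained form; it keeps the record's use of a squared `linregress` correlation but is written for this card, not taken from the repository.

```python
from scipy.stats import linregress


class RegressionScorerMixin:
    """Corrected sketch of the r^2-style score used in the record."""

    def predict(self, X):
        raise NotImplementedError  # the real class delegates to an ffnet model

    def score(self, X, y):
        # Squared Pearson correlation between predictions and targets.
        slope, intercept, r_value, p_value, std_err = linregress(self.predict(X), y)
        return r_value ** 2
```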
74e240d3e2e397eb8f3b0e63a1666412c3c1c66b
app/__init__.py
app/__init__.py
from flask import Flask from config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) from .aflafrettir import aflafrettir as afla_blueprint app.register_blueprint(afla_blueprint) return app
from flask import Flask from flask.ext.bootstrap import Bootstrap from config import config bootstrap = Bootstrap() def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) bootstrap.init_app(app) from .aflafrettir import aflafrettir as afla_blueprint app.register_blueprint(afla_blueprint) return app
Add flask-bootstrap to the mix
Add flask-bootstrap to the mix
Python
mit
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
python
## Code Before: from flask import Flask from config import config def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) from .aflafrettir import aflafrettir as afla_blueprint app.register_blueprint(afla_blueprint) return app ## Instruction: Add flask-bootstrap to the mix ## Code After: from flask import Flask from flask.ext.bootstrap import Bootstrap from config import config bootstrap = Bootstrap() def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) bootstrap.init_app(app) from .aflafrettir import aflafrettir as afla_blueprint app.register_blueprint(afla_blueprint) return app
... from flask import Flask from flask.ext.bootstrap import Bootstrap from config import config bootstrap = Bootstrap() def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) bootstrap.init_app(app) from .aflafrettir import aflafrettir as afla_blueprint app.register_blueprint(afla_blueprint) ...
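The change above is the standard Flask extension pattern: the extension object is created once at import time with no app bound, then attached inside the application factory via `init_app`. Below is a compact sketch of that pattern; the modern `flask_bootstrap` import path and the config object name are assumptions for illustration (the record itself uses the older `flask.ext.bootstrap` spelling).

```python
from flask import Flask
from flask_bootstrap import Bootstrap

bootstrap = Bootstrap()  # created once at import time, no app bound yet


def create_app(config_object="config.ProductionConfig"):
    app = Flask(__name__)
    app.config.from_object(config_object)

    # Each extension is attached to this particular app instance here.
    bootstrap.init_app(app)

    return app


# Usage sketch: app = create_app(); app.run()
```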
3916efe4a017fe9e0fb1c5fe09b99f374d7a4060
instana/__init__.py
instana/__init__.py
__author__ = 'Instana Inc.' __copyright__ = 'Copyright 2016 Instana Inc.' __credits__ = ['Pavlo Baron'] __license__ = 'MIT' __version__ = '0.0.1' __maintainer__ = 'Pavlo Baron' __email__ = '[email protected]' __all__ = ['sensor', 'tracer']
__author__ = 'Instana Inc.' __copyright__ = 'Copyright 2017 Instana Inc.' __credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo'] __license__ = 'MIT' __version__ = '0.6.6' __maintainer__ = 'Peter Giacomo Lombardo' __email__ = '[email protected]' __all__ = ['sensor', 'tracer']
Update module init file; begin version stamping here.
Update module init file; begin version stamping here.
Python
mit
instana/python-sensor,instana/python-sensor
python
## Code Before: __author__ = 'Instana Inc.' __copyright__ = 'Copyright 2016 Instana Inc.' __credits__ = ['Pavlo Baron'] __license__ = 'MIT' __version__ = '0.0.1' __maintainer__ = 'Pavlo Baron' __email__ = '[email protected]' __all__ = ['sensor', 'tracer'] ## Instruction: Update module init file; begin version stamping here. ## Code After: __author__ = 'Instana Inc.' __copyright__ = 'Copyright 2017 Instana Inc.' __credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo'] __license__ = 'MIT' __version__ = '0.6.6' __maintainer__ = 'Peter Giacomo Lombardo' __email__ = '[email protected]' __all__ = ['sensor', 'tracer']
# ... existing code ... __author__ = 'Instana Inc.' __copyright__ = 'Copyright 2017 Instana Inc.' __credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo'] __license__ = 'MIT' __version__ = '0.6.6' __maintainer__ = 'Peter Giacomo Lombardo' __email__ = '[email protected]' __all__ = ['sensor', 'tracer'] # ... rest of the code ...
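With version stamping now living in `instana/__init__.py`, a common companion step is to read that same `__version__` string from packaging code so the number is defined exactly once. The sketch below shows one hedged way to do that with a regular expression; the file path and the setup.py comment are assumptions, not taken from the sensor's repository.

```python
import re
from pathlib import Path


def read_version(init_file="instana/__init__.py"):
    """Pull __version__ out of the package's __init__ without importing it."""
    text = Path(init_file).read_text()
    match = re.search(r"^__version__\s*=\s*['\"]([^'\"]+)['\"]", text, re.M)
    if not match:
        raise RuntimeError(f"No __version__ found in {init_file}")
    return match.group(1)


# e.g. in setup.py:  setup(name="instana", version=read_version(), ...)
```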
4b62610ef39e999c1328e884b01cc14c7dcd9591
core/alsp_src/generic/fpbasis.c
core/alsp_src/generic/fpbasis.c
/*===========================================================================* | fpbasis.c | Copyright (c) 1996-97 Applied Logic Systems, Inc. | | -- Floating point math abstractions | *===========================================================================*/ #include "defs.h" #include "fpbasis.h" #ifdef MacOS #include <fp.h> #endif int is_ieee_nan PARAMS( (double) ); int is_ieee_inf PARAMS( (double) ); int is_ieee_nan(v) double v; { return isnan(v); } int is_ieee_inf(v) double v; { #ifdef SOLARIS switch (fpclass(v)) { case FP_NINF: return(1); case FP_PINF: return(1); default: return(0); } #elif defined(WIN32) || defined(AIX) return !finite(v); #elif defined(MacOS) return !isfinite(v); #elif (defined(__sgi) && defined(__mips)) return(!finite(v)); #else return isinf(v); #endif }
/*===========================================================================* | fpbasis.c | Copyright (c) 1996-97 Applied Logic Systems, Inc. | | -- Floating point math abstractions | *===========================================================================*/ #include "defs.h" #include "fpbasis.h" #ifdef MacOS #include <fp.h> #endif int is_ieee_nan PARAMS( (double) ); int is_ieee_inf PARAMS( (double) ); int is_ieee_nan(v) double v; { return isnan(v); } int is_ieee_inf(v) double v; { #if defined(SOLARIS) || defined(UNIX_SOLARIS) switch (fpclass(v)) { case FP_NINF: return(1); case FP_PINF: return(1); default: return(0); } #elif defined(WIN32) || defined(AIX) return !finite(v); #elif defined(MacOS) return !isfinite(v); #elif (defined(__sgi) && defined(__mips)) return(!finite(v)); #else return isinf(v); #endif }
Change for new config system
Change for new config system
C
mit
AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog,AppliedLogicSystems/ALSProlog
c
## Code Before: /*===========================================================================* | fpbasis.c | Copyright (c) 1996-97 Applied Logic Systems, Inc. | | -- Floating point math abstractions | *===========================================================================*/ #include "defs.h" #include "fpbasis.h" #ifdef MacOS #include <fp.h> #endif int is_ieee_nan PARAMS( (double) ); int is_ieee_inf PARAMS( (double) ); int is_ieee_nan(v) double v; { return isnan(v); } int is_ieee_inf(v) double v; { #ifdef SOLARIS switch (fpclass(v)) { case FP_NINF: return(1); case FP_PINF: return(1); default: return(0); } #elif defined(WIN32) || defined(AIX) return !finite(v); #elif defined(MacOS) return !isfinite(v); #elif (defined(__sgi) && defined(__mips)) return(!finite(v)); #else return isinf(v); #endif } ## Instruction: Change for new config system ## Code After: /*===========================================================================* | fpbasis.c | Copyright (c) 1996-97 Applied Logic Systems, Inc. | | -- Floating point math abstractions | *===========================================================================*/ #include "defs.h" #include "fpbasis.h" #ifdef MacOS #include <fp.h> #endif int is_ieee_nan PARAMS( (double) ); int is_ieee_inf PARAMS( (double) ); int is_ieee_nan(v) double v; { return isnan(v); } int is_ieee_inf(v) double v; { #if defined(SOLARIS) || defined(UNIX_SOLARIS) switch (fpclass(v)) { case FP_NINF: return(1); case FP_PINF: return(1); default: return(0); } #elif defined(WIN32) || defined(AIX) return !finite(v); #elif defined(MacOS) return !isfinite(v); #elif (defined(__sgi) && defined(__mips)) return(!finite(v)); #else return isinf(v); #endif }
# ... existing code ... is_ieee_inf(v) double v; { #if defined(SOLARIS) || defined(UNIX_SOLARIS) switch (fpclass(v)) { case FP_NINF: return(1); # ... rest of the code ...
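The fix above only widens the preprocessor test so the Solaris branch matches either SOLARIS or UNIX_SOLARIS; the surrounding code still has to pick between fpclass, finite, isfinite and isinf per platform. As a cross-language aside written for this card, the same classification is a single portable call in Python's standard library:

```python
import math


def is_ieee_nan(v: float) -> bool:
    return math.isnan(v)


def is_ieee_inf(v: float) -> bool:
    return math.isinf(v)


for value in (float("nan"), float("inf"), float("-inf"), 1.5):
    print(value, is_ieee_nan(value), is_ieee_inf(value))
```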
859722fea0ed205c1af37c43c211d2f2855d22fc
fonts/create.py
fonts/create.py
import fontforge fontforge.open('input.otf').save('termu.sfd')
import fontforge font = fontforge.open('input.otf') font.fontname = 'Termu-' + font.fontname font.familyname = 'Termu: ' + font.familyname font.fullname = 'Termu: ' + font.fullname font.save('termu.sfd')
Add "termu" to the name of the font
Add "termu" to the name of the font
Python
mit
CoderPuppy/cc-emu,CoderPuppy/cc-emu,CoderPuppy/cc-emu
python
## Code Before: import fontforge fontforge.open('input.otf').save('termu.sfd') ## Instruction: Add "termu" to the name of the font ## Code After: import fontforge font = fontforge.open('input.otf') font.fontname = 'Termu-' + font.fontname font.familyname = 'Termu: ' + font.familyname font.fullname = 'Termu: ' + font.fullname font.save('termu.sfd')
... import fontforge font = fontforge.open('input.otf') font.fontname = 'Termu-' + font.fontname font.familyname = 'Termu: ' + font.familyname font.fullname = 'Termu: ' + font.fullname font.save('termu.sfd') ...
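The script above prefixes the font's PostScript, family and full names before saving an SFD. A slightly more reusable version of the same idea is sketched below; the function name, parameters and the `if __name__` guard are additions for illustration, while the fontforge attributes are exactly the ones already used in the record.

```python
import fontforge


def rename_font(src, dest, prefix="Termu"):
    """Open src, prefix its naming fields, and save the result as an SFD."""
    font = fontforge.open(src)
    font.fontname = f"{prefix}-{font.fontname}"        # PostScript name: no spaces
    font.familyname = f"{prefix}: {font.familyname}"
    font.fullname = f"{prefix}: {font.fullname}"
    font.save(dest)


if __name__ == "__main__":
    rename_font("input.otf", "termu.sfd")
```

The PostScript fontname traditionally avoids spaces, which is why the record uses a hyphen there and a colon plus space in the human-readable family and full names.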
564ae1eb637ec509f37ade93d4079117cc73fd58
lab_assistant/storage/__init__.py
lab_assistant/storage/__init__.py
from copy import deepcopy from simpleflake import simpleflake from lab_assistant import conf, utils __all__ = [ 'get_storage', 'store', 'retrieve', 'retrieve_all', 'clear', ] def get_storage(path=None, name='Experiment', **opts): if not path: path = conf.storage['path'] _opts = deepcopy(conf.storage.get('options', {})) _opts.update(opts) opts = _opts if path in get_storage._cache: return get_storage._cache[path] Storage = utils.import_path(path) get_storage._cache[path] = Storage(name, **opts) return get_storage._cache[path] get_storage._cache = {} def store(result, storage=None): storage = storage or get_storage(name=result.experiment.name) key = simpleflake() storage.set(key, result) return key def retrieve(key, storage=None): storage = storage or get_storage() return storage.get(key) def retrieve_all(storage=None): return (storage or get_storage()).list() def remove(key, storage=None): (storage or get_storage()).remove(key) def clear(storage=None): return (storage or get_storage()).clear()
from copy import deepcopy from collections import defaultdict from simpleflake import simpleflake from lab_assistant import conf, utils __all__ = [ 'get_storage', 'store', 'retrieve', 'retrieve_all', 'clear', ] def get_storage(path=None, name='Experiment', **opts): if not path: path = conf.storage['path'] _opts = deepcopy(conf.storage.get('options', {})) _opts.update(opts) opts = _opts if path in get_storage._cache: if name in get_storage._cache[path]: return get_storage._cache[path][name] Storage = utils.import_path(path) get_storage._cache[path].update({ name: Storage(name, **opts) }) return get_storage._cache[path][name] get_storage._cache = defaultdict(dict) def store(result, storage=None): storage = storage or get_storage(name=result.experiment.name) key = simpleflake() storage.set(key, result) return key def retrieve(key, storage=None): storage = storage or get_storage() return storage.get(key) def retrieve_all(storage=None): return (storage or get_storage()).list() def remove(key, storage=None): (storage or get_storage()).remove(key) def clear(storage=None): return (storage or get_storage()).clear()
Fix get_storage cache to hold separate entries for each experiment key
Fix get_storage cache to hold separate entries for each experiment key
Python
mit
joealcorn/lab_assistant
python
## Code Before: from copy import deepcopy from simpleflake import simpleflake from lab_assistant import conf, utils __all__ = [ 'get_storage', 'store', 'retrieve', 'retrieve_all', 'clear', ] def get_storage(path=None, name='Experiment', **opts): if not path: path = conf.storage['path'] _opts = deepcopy(conf.storage.get('options', {})) _opts.update(opts) opts = _opts if path in get_storage._cache: return get_storage._cache[path] Storage = utils.import_path(path) get_storage._cache[path] = Storage(name, **opts) return get_storage._cache[path] get_storage._cache = {} def store(result, storage=None): storage = storage or get_storage(name=result.experiment.name) key = simpleflake() storage.set(key, result) return key def retrieve(key, storage=None): storage = storage or get_storage() return storage.get(key) def retrieve_all(storage=None): return (storage or get_storage()).list() def remove(key, storage=None): (storage or get_storage()).remove(key) def clear(storage=None): return (storage or get_storage()).clear() ## Instruction: Fix get_storage cache to hold separate entries for each experiment key ## Code After: from copy import deepcopy from collections import defaultdict from simpleflake import simpleflake from lab_assistant import conf, utils __all__ = [ 'get_storage', 'store', 'retrieve', 'retrieve_all', 'clear', ] def get_storage(path=None, name='Experiment', **opts): if not path: path = conf.storage['path'] _opts = deepcopy(conf.storage.get('options', {})) _opts.update(opts) opts = _opts if path in get_storage._cache: if name in get_storage._cache[path]: return get_storage._cache[path][name] Storage = utils.import_path(path) get_storage._cache[path].update({ name: Storage(name, **opts) }) return get_storage._cache[path][name] get_storage._cache = defaultdict(dict) def store(result, storage=None): storage = storage or get_storage(name=result.experiment.name) key = simpleflake() storage.set(key, result) return key def retrieve(key, storage=None): storage = storage or get_storage() return storage.get(key) def retrieve_all(storage=None): return (storage or get_storage()).list() def remove(key, storage=None): (storage or get_storage()).remove(key) def clear(storage=None): return (storage or get_storage()).clear()
# ... existing code ... from copy import deepcopy from collections import defaultdict from simpleflake import simpleflake # ... modified code ... opts = _opts if path in get_storage._cache: if name in get_storage._cache[path]: return get_storage._cache[path][name] Storage = utils.import_path(path) get_storage._cache[path].update({ name: Storage(name, **opts) }) return get_storage._cache[path][name] get_storage._cache = defaultdict(dict) def store(result, storage=None): # ... rest of the code ...
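The fix above keys the memoised storage objects by backend path and then by experiment name, so two experiments on the same backend no longer share one instance. Here is a generic sketch of that two-level cache, detached from the project's Storage classes; `make_backend` is a stand-in constructor invented for this card.

```python
from collections import defaultdict


def make_backend(path, name, **opts):
    # Stand-in constructor; the real code imports the class named by `path`.
    return {"path": path, "name": name, "opts": opts}


_cache = defaultdict(dict)  # path -> {experiment name -> instance}


def get_storage(path, name="Experiment", **opts):
    if name not in _cache[path]:
        _cache[path][name] = make_backend(path, name, **opts)
    return _cache[path][name]


a = get_storage("redis", "ExpA")
b = get_storage("redis", "ExpB")
assert a is get_storage("redis", "ExpA") and a is not b  # cached per (path, name)
```

A flat dict keyed by the tuple (path, name) would work just as well; the nested defaultdict mainly mirrors the shape used in the record.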
8b9ebbad9e87af3f56570ba3c32dcdb2d7ca4a39
django_iceberg/models/base_models.py
django_iceberg/models/base_models.py
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: app_label = "django_iceberg" abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
Add app_name for django < 1.7 compatibility
Add app_name for django < 1.7 compatibility
Python
mit
izberg-marketplace/django-izberg,izberg-marketplace/django-izberg,Iceberg-Marketplace/django-iceberg,Iceberg-Marketplace/django-iceberg
python
## Code Before: from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError ## Instruction: Add app_name for django < 1.7 compatibility ## Code After: from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ DEFAULT_ICEBERG_ENV = getattr(settings, 'ICEBERG_DEFAULT_ENVIRO', "prod") class IcebergBaseModel(models.Model): ICEBERG_PROD, ICEBERG_SANDBOX, ICEBERG_STAGE, ICEBERG_SANDBOX_STAGE = "prod", "sandbox", "stage", "sandbox_stage" ENVIRONMENT_CHOICES = ( (ICEBERG_PROD, _('Iceberg - Prod')), (ICEBERG_STAGE, _('Iceberg - Prod Stage')), # PreProd (ICEBERG_SANDBOX, _('Iceberg - Sandbox')), (ICEBERG_SANDBOX_STAGE, _('Iceberg - Sandbox Stage')), ) environment = models.CharField(choices=ENVIRONMENT_CHOICES, default=DEFAULT_ICEBERG_ENV, max_length = 20) iceberg_id = models.PositiveIntegerField(blank=True, null=True) last_updated = models.DateTimeField(auto_now = True) API_RESOURCE_NAME = None class Meta: app_label = "django_iceberg" abstract = True def iceberg_sync(self, api_handler): """ Sync the local object from Iceberg version """ raise NotImplementedError
# ... existing code ... API_RESOURCE_NAME = None class Meta: app_label = "django_iceberg" abstract = True def iceberg_sync(self, api_handler): # ... rest of the code ...
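One small wording note on this last record: the commit subject says "app_name", but the option actually added is `Meta.app_label`, which Django releases before 1.7 required on models (abstract ones included) defined outside a conventional models.py module. The sketch below shows how a concrete model might inherit the abstract base; the model name and the extra field are assumptions for illustration.

```python
from django.db import models

from django_iceberg.models.base_models import IcebergBaseModel


class IcebergOrder(IcebergBaseModel):
    """Illustrative concrete subclass; inherits environment, iceberg_id, last_updated."""

    total = models.DecimalField(max_digits=10, decimal_places=2, default=0)

    API_RESOURCE_NAME = "order"

    class Meta(IcebergBaseModel.Meta):
        app_label = "django_iceberg"
        abstract = False
```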