field         type           range
commit        stringlengths  40 .. 40
old_file      stringlengths  4 .. 106
new_file      stringlengths  4 .. 106
old_contents  stringlengths  10 .. 2.94k
new_contents  stringlengths  21 .. 2.95k
subject       stringlengths  16 .. 444
message       stringlengths  17 .. 2.63k
lang          stringclasses  1 value
license       stringclasses  13 values
repos         stringlengths  7 .. 43k
ndiff         stringlengths  52 .. 3.31k
instruction   stringlengths  16 .. 444
content       stringlengths  133 .. 4.32k
diff          stringlengths  49 .. 3.61k
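Each record below pairs one file's contents before and after a commit with the commit subject, repository metadata, and several diff renderings of the change. As a minimal sketch of how such records could be consumed — the file name commits.jsonl and the JSON-lines layout are assumptions for illustration, not part of this dump:

import json

# Hypothetical: read one record per line; each record carries the fields
# listed in the schema above (commit, old_contents, new_contents, diff, ...).
with open("commits.jsonl") as fh:
    records = [json.loads(line) for line in fh]

record = records[0]
print(record["commit"])    # 40-character commit SHA
print(record["subject"])   # one-line commit subject
print(record["diff"])      # line diff between old_contents and new_contents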
82954f3df7e3b8f0a4cb921e40f351938451221d
cd/lambdas/pipeline-fail-notification/lambda_function.py
cd/lambdas/pipeline-fail-notification/lambda_function.py
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        period_start = datetime.now() - timedelta(seconds=60)

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                exec = action_state['latestExecution']

                if execution['lastStatusChange'] > period_start:
                    if execution['status'] == 'Failed':
                        post_notification(action_state)

        return '...Done'
    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
        put_job_failure(job, 'Function exception: ' + str(e))
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                if 'latestExecution' in action_state:
                    execution = action_state['latestExecution']
                    timezone = execution['lastStatusChange'].tzinfo
                    period_start = datetime.now(timezone) - timedelta(seconds=60)

                    if execution['lastStatusChange'] > period_start:
                        if execution['status'] == 'Failed':
                            post_notification(action_state)

        return '...Done'
    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
Fix CD fail lambda python
Fix CD fail lambda python
Python
mit
PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure,PRX/Infrastructure
  import boto3
  import traceback
  import json
  import os

  from datetime import datetime, timedelta

  code_pipeline = boto3.client('codepipeline')
  sns = boto3.client('sns')


  def post_notification(action_state):
      topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
      message = json.dumps(action_state)
      sns.publish(TopicArn=topic_arn, Message=message)


  def lambda_handler(event, context):
      try:
          print('Checking pipeline state...')

-         period_start = datetime.now() - timedelta(seconds=60)
-
          pipeline_name = os.environ['PIPELINE_NAME']
          pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

          for stage_state in pipeline_state['stageStates']:
              for action_state in stage_state['actionStates']:
+                 if 'latestExecution' in action_state:
-                 exec = action_state['latestExecution']
+                     execution = action_state['latestExecution']
+                     timezone = execution['lastStatusChange'].tzinfo
+                     period_start = datetime.now(timezone) - timedelta(seconds=60)
+
-                 if execution['lastStatusChange'] > period_start:
+                     if execution['lastStatusChange'] > period_start:
-                     if execution['status'] == 'Failed':
+                         if execution['status'] == 'Failed':
-                         post_notification(action_state)
+                             post_notification(action_state)

          return '...Done'
      except Exception as e:
          print('Function failed due to exception.')
          print(e)
          traceback.print_exc()
-         put_job_failure(job, 'Function exception: ' + str(e))
Fix CD fail lambda python
## Code Before:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        period_start = datetime.now() - timedelta(seconds=60)

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                exec = action_state['latestExecution']

                if execution['lastStatusChange'] > period_start:
                    if execution['status'] == 'Failed':
                        post_notification(action_state)

        return '...Done'
    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
        put_job_failure(job, 'Function exception: ' + str(e))

## Instruction:
Fix CD fail lambda python

## Code After:
import boto3
import traceback
import json
import os

from datetime import datetime, timedelta

code_pipeline = boto3.client('codepipeline')
sns = boto3.client('sns')


def post_notification(action_state):
    topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
    message = json.dumps(action_state)
    sns.publish(TopicArn=topic_arn, Message=message)


def lambda_handler(event, context):
    try:
        print('Checking pipeline state...')

        pipeline_name = os.environ['PIPELINE_NAME']
        pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

        for stage_state in pipeline_state['stageStates']:
            for action_state in stage_state['actionStates']:
                if 'latestExecution' in action_state:
                    execution = action_state['latestExecution']
                    timezone = execution['lastStatusChange'].tzinfo
                    period_start = datetime.now(timezone) - timedelta(seconds=60)

                    if execution['lastStatusChange'] > period_start:
                        if execution['status'] == 'Failed':
                            post_notification(action_state)

        return '...Done'
    except Exception as e:
        print('Function failed due to exception.')
        print(e)
        traceback.print_exc()
  import boto3
  import traceback
  import json
  import os

  from datetime import datetime, timedelta

  code_pipeline = boto3.client('codepipeline')
  sns = boto3.client('sns')


  def post_notification(action_state):
      topic_arn = os.environ['CODEPIPELINE_FAILURES_TOPIC_ARN']
      message = json.dumps(action_state)
      sns.publish(TopicArn=topic_arn, Message=message)


  def lambda_handler(event, context):
      try:
          print('Checking pipeline state...')

-         period_start = datetime.now() - timedelta(seconds=60)
-
          pipeline_name = os.environ['PIPELINE_NAME']
          pipeline_state = code_pipeline.get_pipeline_state(name=pipeline_name)

          for stage_state in pipeline_state['stageStates']:
              for action_state in stage_state['actionStates']:
+                 if 'latestExecution' in action_state:
-                 exec = action_state['latestExecution']
+                     execution = action_state['latestExecution']
?                 ++++     +++++
+                     timezone = execution['lastStatusChange'].tzinfo
+                     period_start = datetime.now(timezone) - timedelta(seconds=60)
+
-                 if execution['lastStatusChange'] > period_start:
+                     if execution['lastStatusChange'] > period_start:
?                 ++++
-                     if execution['status'] == 'Failed':
+                         if execution['status'] == 'Failed':
?                     ++++
-                         post_notification(action_state)
+                             post_notification(action_state)
?                         ++++

          return '...Done'
      except Exception as e:
          print('Function failed due to exception.')
          print(e)
          traceback.print_exc()
-         put_job_failure(job, 'Function exception: ' + str(e))
702217fee6e332b3d08902bb67f0725626f0c88d
test_defuzz.py
test_defuzz.py
from defuzz import Defuzzer


def test_it():
    dfz = Defuzzer()
    assert dfz.defuzz((1, 2)) == (1, 2)
    assert dfz.defuzz((1, 3)) == (1, 3)
    assert dfz.defuzz((1.00000001, 2)) == (1, 2)
    assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
    assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
    assert dfz.defuzz((2, 3)) == (2.00000001, 3)
import itertools
import math

from defuzz import Defuzzer

from hypothesis import given
from hypothesis.strategies import floats, lists, tuples

from hypo_helpers import f


def test_it():
    dfz = Defuzzer()
    assert dfz.defuzz((1, 2)) == (1, 2)
    assert dfz.defuzz((1, 3)) == (1, 3)
    assert dfz.defuzz((1.00000001, 2)) == (1, 2)
    assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
    assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
    assert dfz.defuzz((2, 3)) == (2.00000001, 3)


@given(lists(tuples(f, f)))
def test_hypo(points):
    dfz = Defuzzer(round_digits=2)
    dfz_points = [dfz.defuzz(pt) for pt in points]

    # The output values should all be in the inputs.
    assert all(pt in points for pt in dfz_points)

    # No two unequal output values should be too close together.
    if len(points) > 1:
        for a, b in itertools.combinations(dfz_points, 2):
            if a == b:
                continue
            distance = math.hypot(a[0] - b[0], a[1] - b[1])
            assert distance > .01
Add a Hypothesis test for Defuzzer
Add a Hypothesis test for Defuzzer
Python
apache-2.0
nedbat/zellij
+ import itertools
+ import math
+
  from defuzz import Defuzzer
+
+ from hypothesis import given
+ from hypothesis.strategies import floats, lists, tuples
+
+ from hypo_helpers import f


  def test_it():
      dfz = Defuzzer()
      assert dfz.defuzz((1, 2)) == (1, 2)
      assert dfz.defuzz((1, 3)) == (1, 3)
      assert dfz.defuzz((1.00000001, 2)) == (1, 2)
      assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
      assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
      assert dfz.defuzz((2, 3)) == (2.00000001, 3)
+
+ @given(lists(tuples(f, f)))
+ def test_hypo(points):
+     dfz = Defuzzer(round_digits=2)
+     dfz_points = [dfz.defuzz(pt) for pt in points]
+
+     # The output values should all be in the inputs.
+     assert all(pt in points for pt in dfz_points)
+
+     # No two unequal output values should be too close together.
+     if len(points) > 1:
+         for a, b in itertools.combinations(dfz_points, 2):
+             if a == b:
+                 continue
+             distance = math.hypot(a[0] - b[0], a[1] - b[1])
+             assert distance > .01
+
Add a Hypothesis test for Defuzzer
## Code Before:
from defuzz import Defuzzer


def test_it():
    dfz = Defuzzer()
    assert dfz.defuzz((1, 2)) == (1, 2)
    assert dfz.defuzz((1, 3)) == (1, 3)
    assert dfz.defuzz((1.00000001, 2)) == (1, 2)
    assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
    assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
    assert dfz.defuzz((2, 3)) == (2.00000001, 3)

## Instruction:
Add a Hypothesis test for Defuzzer

## Code After:
import itertools
import math

from defuzz import Defuzzer

from hypothesis import given
from hypothesis.strategies import floats, lists, tuples

from hypo_helpers import f


def test_it():
    dfz = Defuzzer()
    assert dfz.defuzz((1, 2)) == (1, 2)
    assert dfz.defuzz((1, 3)) == (1, 3)
    assert dfz.defuzz((1.00000001, 2)) == (1, 2)
    assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
    assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
    assert dfz.defuzz((2, 3)) == (2.00000001, 3)


@given(lists(tuples(f, f)))
def test_hypo(points):
    dfz = Defuzzer(round_digits=2)
    dfz_points = [dfz.defuzz(pt) for pt in points]

    # The output values should all be in the inputs.
    assert all(pt in points for pt in dfz_points)

    # No two unequal output values should be too close together.
    if len(points) > 1:
        for a, b in itertools.combinations(dfz_points, 2):
            if a == b:
                continue
            distance = math.hypot(a[0] - b[0], a[1] - b[1])
            assert distance > .01
+ import itertools
+ import math
+
  from defuzz import Defuzzer
+
+ from hypothesis import given
+ from hypothesis.strategies import floats, lists, tuples
+
+ from hypo_helpers import f


  def test_it():
      dfz = Defuzzer()
      assert dfz.defuzz((1, 2)) == (1, 2)
      assert dfz.defuzz((1, 3)) == (1, 3)
      assert dfz.defuzz((1.00000001, 2)) == (1, 2)
      assert dfz.defuzz((1, 2, 3, 4, 5)) == (1, 2, 3, 4, 5)
      assert dfz.defuzz((2.00000001, 3)) == (2.00000001, 3)
      assert dfz.defuzz((2, 3)) == (2.00000001, 3)
+
+
+ @given(lists(tuples(f, f)))
+ def test_hypo(points):
+     dfz = Defuzzer(round_digits=2)
+     dfz_points = [dfz.defuzz(pt) for pt in points]
+
+     # The output values should all be in the inputs.
+     assert all(pt in points for pt in dfz_points)
+
+     # No two unequal output values should be too close together.
+     if len(points) > 1:
+         for a, b in itertools.combinations(dfz_points, 2):
+             if a == b:
+                 continue
+             distance = math.hypot(a[0] - b[0], a[1] - b[1])
+             assert distance > .01
08bb24ad80db72457c87533288b97942cc178dd6
src/kanboard/urls.py
src/kanboard/urls.py
import os
from django.conf.urls.defaults import patterns, url
import kanboard

urlpatterns = patterns('kanboard.views',
    url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'),
    url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'),
)
# Serve static content
static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static')
urlpatterns += patterns('',
    (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root})
)
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('kanboard.views',
    url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'),
    url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'),
)
Remove static file serving (using django-staticfiles instead is recommended)
Remove static file serving (using django-staticfiles instead is recommended)
Python
bsd-3-clause
zellyn/django-kanboard,zellyn/django-kanboard
- import os
  from django.conf.urls.defaults import patterns, url
- import kanboard

  urlpatterns = patterns('kanboard.views',
      url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'),
      url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'),
  )
- # Serve static content
- static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static')
- urlpatterns += patterns('',
-     (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root})
- )
Remove static file serving (using django-staticfiles instead is recommended)
## Code Before:
import os
from django.conf.urls.defaults import patterns, url
import kanboard

urlpatterns = patterns('kanboard.views',
    url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'),
    url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'),
)
# Serve static content
static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static')
urlpatterns += patterns('',
    (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root})
)

## Instruction:
Remove static file serving (using django-staticfiles instead is recommended)

## Code After:
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('kanboard.views',
    url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'),
    url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'),
)
- import os
  from django.conf.urls.defaults import patterns, url
- import kanboard

  urlpatterns = patterns('kanboard.views',
      url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'),
      url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'),
  )
- # Serve static content
- static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static')
- urlpatterns += patterns('',
-     (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root})
- )
9f843b34ef5c85d781b7dd322641d5459cf6190d
linked_accounts/backends.py
linked_accounts/backends.py
from django.contrib.auth.models import User

from linked_accounts.utils import get_profile


class LinkedAccountsBackend(object):
    def get_user(self, user_id):
        return User.objects.get(id=user_id)

    def authenticate(self, service=None, token=None):
        profile = get_profile(service, token)
from django.contrib.auth.models import User

from linked_accounts.utils import get_profile


class LinkedAccountsBackend(object):
    def get_user(self, user_id):
        return User.objects.get(id=user_id)

    def authenticate(self, service=None, token=None):
        profile = get_profile(service, token)
        return profile
Return profile from authenticate method
Return profile from authenticate method
Python
mit
zen4ever/django-linked-accounts,zen4ever/django-linked-accounts
  from django.contrib.auth.models import User

  from linked_accounts.utils import get_profile


  class LinkedAccountsBackend(object):
      def get_user(self, user_id):
          return User.objects.get(id=user_id)

      def authenticate(self, service=None, token=None):
          profile = get_profile(service, token)
+         return profile
Return profile from authenticate method
## Code Before:
from django.contrib.auth.models import User

from linked_accounts.utils import get_profile


class LinkedAccountsBackend(object):
    def get_user(self, user_id):
        return User.objects.get(id=user_id)

    def authenticate(self, service=None, token=None):
        profile = get_profile(service, token)

## Instruction:
Return profile from authenticate method

## Code After:
from django.contrib.auth.models import User

from linked_accounts.utils import get_profile


class LinkedAccountsBackend(object):
    def get_user(self, user_id):
        return User.objects.get(id=user_id)

    def authenticate(self, service=None, token=None):
        profile = get_profile(service, token)
        return profile
  from django.contrib.auth.models import User

  from linked_accounts.utils import get_profile


  class LinkedAccountsBackend(object):
      def get_user(self, user_id):
          return User.objects.get(id=user_id)

      def authenticate(self, service=None, token=None):
          profile = get_profile(service, token)
+         return profile
a39b7b2b9b0c9179d3aedcc29286cdcebf568d54
tests.py
tests.py
'''
Copyright 2009 Slide, Inc.
'''
import unittest

import pyecc

DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'

class ECC_Verify_Tests(unittest.TestCase):
    def test_BasicVerification(self):
        ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)

        assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
                DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

if __name__ == '__main__':
    unittest.main()
'''
Copyright 2009 Slide, Inc.
'''
import unittest

import pyecc

DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'

class ECC_Verify_Tests(unittest.TestCase):
    def setUp(self):
        super(ECC_Verify_Tests, self).setUp()
        self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)

    def test_BasicVerification(self):
        assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
                DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

    def test_BadVerification(self):
        assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
                DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

if __name__ == '__main__':
    unittest.main()
Implement a bad sig test
Implement a bad sig test
Python
lgpl-2.1
rtyler/PyECC,slideinc/PyECC,rtyler/PyECC,slideinc/PyECC
  '''
  Copyright 2009 Slide, Inc.
  '''
  import unittest

  import pyecc

  DEFAULT_DATA = 'This message will be signed\n'
  DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
  DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
  DEFAULT_PRIVKEY = 'my private key'

  class ECC_Verify_Tests(unittest.TestCase):
+     def setUp(self):
+         super(ECC_Verify_Tests, self).setUp()
+         self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
+
      def test_BasicVerification(self):
-         ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
-
-         assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
+         assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
+                 DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
+
+     def test_BadVerification(self):
+         assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
                  DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

  if __name__ == '__main__':
      unittest.main()
Implement a bad sig test
## Code Before:
'''
Copyright 2009 Slide, Inc.
'''
import unittest

import pyecc

DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'

class ECC_Verify_Tests(unittest.TestCase):
    def test_BasicVerification(self):
        ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)

        assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
                DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

if __name__ == '__main__':
    unittest.main()

## Instruction:
Implement a bad sig test

## Code After:
'''
Copyright 2009 Slide, Inc.
'''
import unittest

import pyecc

DEFAULT_DATA = 'This message will be signed\n'
DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
DEFAULT_PRIVKEY = 'my private key'

class ECC_Verify_Tests(unittest.TestCase):
    def setUp(self):
        super(ECC_Verify_Tests, self).setUp()
        self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)

    def test_BasicVerification(self):
        assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
                DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

    def test_BadVerification(self):
        assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
                DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

if __name__ == '__main__':
    unittest.main()
  '''
  Copyright 2009 Slide, Inc.
  '''
  import unittest

  import pyecc

  DEFAULT_DATA = 'This message will be signed\n'
  DEFAULT_SIG = '$HPI?t(I*1vAYsl$|%21WXND=6Br*[>k(OR9B!GOwHqL0s+3Uq'
  DEFAULT_PUBKEY = '8W;>i^H0qi|J&$coR5MFpR*Vn'
  DEFAULT_PRIVKEY = 'my private key'

  class ECC_Verify_Tests(unittest.TestCase):
+     def setUp(self):
+         super(ECC_Verify_Tests, self).setUp()
+         self.ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
+
      def test_BasicVerification(self):
-         ecc = pyecc.ECC(public=DEFAULT_PUBKEY, private=DEFAULT_PRIVKEY)
-
-         assert ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
+         assert self.ecc.verify(DEFAULT_DATA, DEFAULT_SIG), ('Failed to verify signature',
?                +++++
+                 DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)
+
+     def test_BadVerification(self):
+         assert self.ecc.verify(DEFAULT_DATA, "FAIL") == False , ('Verified on a bad sig',
                  DEFAULT_DATA, DEFAULT_SIG, DEFAULT_PUBKEY, DEFAULT_PRIVKEY)

  if __name__ == '__main__':
      unittest.main()
cc4211e2a3cdc58bf5ac3bf64711b881d1c046d0
modules/currency.py
modules/currency.py
import urllib.parse
from bs4 import BeautifulSoup
import re

import syscmd

def currency( self ):
    amount = 1
    frm = "eur"
    to = "usd"
    if len(self.msg) < 7:
        self.send_chan("Usage: !currency <amount> <from> <to>")
    else:
        try:
            amount = float(self.msg[4])
        except ValueError:
            pass
        frm = self.msg[5]
        to = self.msg[6]
        if isinstance( amount, float ):
            frm = urllib.parse.quote(frm)
            to = urllib.parse.quote(to)
            url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
            html = syscmd.getHtml(self, url, True)
        try:
            soup = BeautifulSoup(html)
            result = soup.findAll("div", {"id" : "currency_converter_result"})
            result = "{0}".format(result[0])
            trimmed = re.sub('<[^<]+?>', '', result)
            self.send_chan(trimmed)
        except:
            pass
import urllib.parse
from bs4 import BeautifulSoup
import re

import syscmd

def currency( self ):
    amount = 1
    frm = "eur"
    to = "usd"
    if len(self.msg) < 7:
        self.send_chan("Usage: !currency <amount> <from> <to>")
    if len(self.msg) == 7:
        try:
            amount = float(self.msg[4])
        except ValueError:
            pass
        frm = self.msg[5].upper()
        to = self.msg[6].upper()
        combined = frm, to
        ## If first value is float and currencies are valid
        if isinstance( amount, float ) and frm in open("modules/data/currencies.txt").read():
            print("Moi")
            frm = urllib.parse.quote(frm)
            to = urllib.parse.quote(to)
            url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
            html = syscmd.getHtml(self, url, True)
        else:
            self.send_chan("Usage: !currency <amount> <from> <to>")
        try:
            soup = BeautifulSoup(html)
            result = soup.findAll("div", {"id" : "currency_converter_result"})
            result = "{0}".format(result[0])
            trimmed = re.sub('<[^<]+?>', '', result)
            self.send_chan(trimmed)
        except:
            pass
Check for valid currencies in a file
Check for valid currencies in a file
Python
mit
jasuka/pyBot,jasuka/pyBot
  import urllib.parse
  from bs4 import BeautifulSoup
  import re

  import syscmd

  def currency( self ):
      amount = 1
      frm = "eur"
      to = "usd"
      if len(self.msg) < 7:
          self.send_chan("Usage: !currency <amount> <from> <to>")
-     else:
+     if len(self.msg) == 7:
          try:
              amount = float(self.msg[4])
          except ValueError:
              pass
-         frm = self.msg[5]
+         frm = self.msg[5].upper()
-         to = self.msg[6]
+         to = self.msg[6].upper()
-         if isinstance( amount, float ):
+         combined = frm, to
+         ## If first value is float and currencies are valid
+         if isinstance( amount, float ) and frm in open("modules/data/currencies.txt").read():
+             print("Moi")
-         frm = urllib.parse.quote(frm)
+             frm = urllib.parse.quote(frm)
-         to = urllib.parse.quote(to)
+             to = urllib.parse.quote(to)
-         url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
+             url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
-         html = syscmd.getHtml(self, url, True)
+             html = syscmd.getHtml(self, url, True)
+         else:
+             self.send_chan("Usage: !currency <amount> <from> <to>")
          try:
              soup = BeautifulSoup(html)
              result = soup.findAll("div", {"id" : "currency_converter_result"})
              result = "{0}".format(result[0])
              trimmed = re.sub('<[^<]+?>', '', result)
              self.send_chan(trimmed)
          except:
              pass
Check for valid currencies in a file
## Code Before:
import urllib.parse
from bs4 import BeautifulSoup
import re

import syscmd

def currency( self ):
    amount = 1
    frm = "eur"
    to = "usd"
    if len(self.msg) < 7:
        self.send_chan("Usage: !currency <amount> <from> <to>")
    else:
        try:
            amount = float(self.msg[4])
        except ValueError:
            pass
        frm = self.msg[5]
        to = self.msg[6]
        if isinstance( amount, float ):
            frm = urllib.parse.quote(frm)
            to = urllib.parse.quote(to)
            url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
            html = syscmd.getHtml(self, url, True)
        try:
            soup = BeautifulSoup(html)
            result = soup.findAll("div", {"id" : "currency_converter_result"})
            result = "{0}".format(result[0])
            trimmed = re.sub('<[^<]+?>', '', result)
            self.send_chan(trimmed)
        except:
            pass

## Instruction:
Check for valid currencies in a file

## Code After:
import urllib.parse
from bs4 import BeautifulSoup
import re

import syscmd

def currency( self ):
    amount = 1
    frm = "eur"
    to = "usd"
    if len(self.msg) < 7:
        self.send_chan("Usage: !currency <amount> <from> <to>")
    if len(self.msg) == 7:
        try:
            amount = float(self.msg[4])
        except ValueError:
            pass
        frm = self.msg[5].upper()
        to = self.msg[6].upper()
        combined = frm, to
        ## If first value is float and currencies are valid
        if isinstance( amount, float ) and frm in open("modules/data/currencies.txt").read():
            print("Moi")
            frm = urllib.parse.quote(frm)
            to = urllib.parse.quote(to)
            url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
            html = syscmd.getHtml(self, url, True)
        else:
            self.send_chan("Usage: !currency <amount> <from> <to>")
        try:
            soup = BeautifulSoup(html)
            result = soup.findAll("div", {"id" : "currency_converter_result"})
            result = "{0}".format(result[0])
            trimmed = re.sub('<[^<]+?>', '', result)
            self.send_chan(trimmed)
        except:
            pass
  import urllib.parse
  from bs4 import BeautifulSoup
  import re

  import syscmd

  def currency( self ):
      amount = 1
      frm = "eur"
      to = "usd"
      if len(self.msg) < 7:
          self.send_chan("Usage: !currency <amount> <from> <to>")
-     else:
+     if len(self.msg) == 7:
          try:
              amount = float(self.msg[4])
          except ValueError:
              pass
-         frm = self.msg[5]
+         frm = self.msg[5].upper()
?                          ++++++++
-         to = self.msg[6]
+         to = self.msg[6].upper()
?                         ++++++++
-         if isinstance( amount, float ):
+         combined = frm, to
+         ## If first value is float and currencies are valid
+         if isinstance( amount, float ) and frm in open("modules/data/currencies.txt").read():
+             print("Moi")
-         frm = urllib.parse.quote(frm)
+             frm = urllib.parse.quote(frm)
?            +
-         to = urllib.parse.quote(to)
+             to = urllib.parse.quote(to)
?            +
-         url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
+             url = "https://www.google.com/finance/converter?a={0}&from={1}&to={2}".format(amount, frm, to)
?            +
-         html = syscmd.getHtml(self, url, True)
?         --------
+             html = syscmd.getHtml(self, url, True)
?            +
+         else:
+             self.send_chan("Usage: !currency <amount> <from> <to>")
          try:
              soup = BeautifulSoup(html)
              result = soup.findAll("div", {"id" : "currency_converter_result"})
              result = "{0}".format(result[0])
              trimmed = re.sub('<[^<]+?>', '', result)
              self.send_chan(trimmed)
          except:
              pass
fbdc69e218a71e984982a39fc36de19b7cf56f90
Publishers/SamplePachube.py
Publishers/SamplePachube.py
import clr

from System import *
from System.Net import WebClient
from System.Xml import XmlDocument
from System.Diagnostics import Trace

url = "http://pachube.com/api/"
apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb"
environmentId = 2065

def Publish(topic, data):
    ms = MemoryStream()
    Trace.WriteLine("Pachube Sample")
    client = WebClient()
    client.Headers.Add('X-PachubeApiKey', apiKey)
    watts, temp = 25, 44
    resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
    client.Dispose();
    return 1

def CreateFullUrl():
    return url + str(environmentId) + '.csv'

def Shutdown():
    return 1

def GetTopics():
    return ["PowerMeter/CC128/Mark"]
import clr

from System import *
from System.Net import WebClient
from System.Xml import XmlDocument
from System.Diagnostics import Trace

url = "http://pachube.com/api/"
apiKey = "<Your-Pachube-Api-Key-Here>"
environmentId = -1

def Publish(topic, data):
    ms = MemoryStream()
    Trace.WriteLine("Pachube Sample")
    client = WebClient()
    client.Headers.Add('X-PachubeApiKey', apiKey)
    watts, temp = 25, 44
    resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
    client.Dispose();
    return 1

def CreateFullUrl():
    return url + str(environmentId) + '.csv'

def Shutdown():
    return 1

def GetTopics():
    return ["PowerMeter/CC128/Mark"]
Change to sample pachube script
Change to sample pachube script
Python
mit
markallanson/sspe,markallanson/sspe
  import clr

  from System import *
  from System.Net import WebClient
  from System.Xml import XmlDocument
  from System.Diagnostics import Trace

  url = "http://pachube.com/api/"
- apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb"
+ apiKey = "<Your-Pachube-Api-Key-Here>"
- environmentId = 2065
+ environmentId = -1

  def Publish(topic, data):
      ms = MemoryStream()
      Trace.WriteLine("Pachube Sample")
      client = WebClient()
      client.Headers.Add('X-PachubeApiKey', apiKey)
      watts, temp = 25, 44
      resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
      client.Dispose();
      return 1

  def CreateFullUrl():
      return url + str(environmentId) + '.csv'

  def Shutdown():
      return 1

  def GetTopics():
      return ["PowerMeter/CC128/Mark"]
Change to sample pachube script
## Code Before:
import clr

from System import *
from System.Net import WebClient
from System.Xml import XmlDocument
from System.Diagnostics import Trace

url = "http://pachube.com/api/"
apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb"
environmentId = 2065

def Publish(topic, data):
    ms = MemoryStream()
    Trace.WriteLine("Pachube Sample")
    client = WebClient()
    client.Headers.Add('X-PachubeApiKey', apiKey)
    watts, temp = 25, 44
    resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
    client.Dispose();
    return 1

def CreateFullUrl():
    return url + str(environmentId) + '.csv'

def Shutdown():
    return 1

def GetTopics():
    return ["PowerMeter/CC128/Mark"]

## Instruction:
Change to sample pachube script

## Code After:
import clr

from System import *
from System.Net import WebClient
from System.Xml import XmlDocument
from System.Diagnostics import Trace

url = "http://pachube.com/api/"
apiKey = "<Your-Pachube-Api-Key-Here>"
environmentId = -1

def Publish(topic, data):
    ms = MemoryStream()
    Trace.WriteLine("Pachube Sample")
    client = WebClient()
    client.Headers.Add('X-PachubeApiKey', apiKey)
    watts, temp = 25, 44
    resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
    client.Dispose();
    return 1

def CreateFullUrl():
    return url + str(environmentId) + '.csv'

def Shutdown():
    return 1

def GetTopics():
    return ["PowerMeter/CC128/Mark"]
  import clr

  from System import *
  from System.Net import WebClient
  from System.Xml import XmlDocument
  from System.Diagnostics import Trace

  url = "http://pachube.com/api/"
- apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb"
+ apiKey = "<Your-Pachube-Api-Key-Here>"
- environmentId = 2065
?                 ^^^^
+ environmentId = -1
?                 ^^

  def Publish(topic, data):
      ms = MemoryStream()
      Trace.WriteLine("Pachube Sample")
      client = WebClient()
      client.Headers.Add('X-PachubeApiKey', apiKey)
      watts, temp = 25, 44
      resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
      client.Dispose();
      return 1

  def CreateFullUrl():
      return url + str(environmentId) + '.csv'

  def Shutdown():
      return 1

  def GetTopics():
      return ["PowerMeter/CC128/Mark"]
58846603f8a5310bb0e6e1eaa9f9f599c315b041
django_webtest/response.py
django_webtest/response.py
from django.test import Client
from django.http import SimpleCookie

from webtest import TestResponse

from django_webtest.compat import urlparse


class DjangoWebtestResponse(TestResponse):
    """
    WebOb's Response quacking more like django's HttpResponse.

    This is here to make more django's TestCase asserts work,
    not to provide a generally useful proxy.
    """
    streaming = False

    @property
    def status_code(self):
        return self.status_int

    @property
    def _charset(self):
        return self.charset

    @property
    def content(self):
        return self.body

    @property
    def client(self):
        client = Client()
        client.cookies = SimpleCookie()
        for k,v in self.test_app.cookies.items():
            client.cookies[k] = v
        return client

    def __getitem__(self, item):
        item = item.lower()
        if item == 'location':
            # django's test response returns location as http://testserver/,
            # WebTest returns it as http://localhost:80/
            e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location)
            if e_netloc == 'localhost:80':
                e_netloc = 'testserver'
            return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment])
        for header, value in self.headerlist:
            if header.lower() == item:
                return value
        raise KeyError(item)
from django.test import Client
from django.http import SimpleCookie

from webtest import TestResponse

from django_webtest.compat import urlparse


class DjangoWebtestResponse(TestResponse):
    """
    WebOb's Response quacking more like django's HttpResponse.

    This is here to make more django's TestCase asserts work,
    not to provide a generally useful proxy.
    """
    streaming = False

    @property
    def status_code(self):
        return self.status_int

    @property
    def _charset(self):
        return self.charset

    @property
    def content(self):
        return self.body

    @property
    def url(self):
        return self['location']

    @property
    def client(self):
        client = Client()
        client.cookies = SimpleCookie()
        for k,v in self.test_app.cookies.items():
            client.cookies[k] = v
        return client

    def __getitem__(self, item):
        item = item.lower()
        if item == 'location':
            # django's test response returns location as http://testserver/,
            # WebTest returns it as http://localhost:80/
            e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location)
            if e_netloc == 'localhost:80':
                e_netloc = 'testserver'
            return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment])
        for header, value in self.headerlist:
            if header.lower() == item:
                return value
        raise KeyError(item)
Add url property to DjangoWebtestResponse so assertRedirects works in 1.6.
Add url property to DjangoWebtestResponse so assertRedirects works in 1.6.
Python
mit
kmike/django-webtest,helenst/django-webtest,vaad2/django-webtest,django-webtest/django-webtest,abbottc/django-webtest,kharandziuk/django-webtest,abbottc/django-webtest,MikeAmy/django-webtest,andrewyoung1991/django-webtest,helenst/django-webtest,yrik/django-webtest,andrewyoung1991/django-webtest,andriisoldatenko/django-webtest,larssos/django-webtest,django-webtest/django-webtest,kmike/django-webtest,wbbradley/django-webtest,andriisoldatenko/django-webtest
  from django.test import Client
  from django.http import SimpleCookie

  from webtest import TestResponse

  from django_webtest.compat import urlparse


  class DjangoWebtestResponse(TestResponse):
      """
      WebOb's Response quacking more like django's HttpResponse.

      This is here to make more django's TestCase asserts work,
      not to provide a generally useful proxy.
      """
      streaming = False

      @property
      def status_code(self):
          return self.status_int

      @property
      def _charset(self):
          return self.charset

      @property
      def content(self):
          return self.body

      @property
+     def url(self):
+         return self['location']
+
+     @property
      def client(self):
          client = Client()
          client.cookies = SimpleCookie()
          for k,v in self.test_app.cookies.items():
              client.cookies[k] = v
          return client

      def __getitem__(self, item):
          item = item.lower()
          if item == 'location':
              # django's test response returns location as http://testserver/,
              # WebTest returns it as http://localhost:80/
              e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location)
              if e_netloc == 'localhost:80':
                  e_netloc = 'testserver'
              return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment])
          for header, value in self.headerlist:
              if header.lower() == item:
                  return value
          raise KeyError(item)
Add url property to DjangoWebtestResponse so assertRedirects works in 1.6.
## Code Before:
from django.test import Client
from django.http import SimpleCookie

from webtest import TestResponse

from django_webtest.compat import urlparse


class DjangoWebtestResponse(TestResponse):
    """
    WebOb's Response quacking more like django's HttpResponse.

    This is here to make more django's TestCase asserts work,
    not to provide a generally useful proxy.
    """
    streaming = False

    @property
    def status_code(self):
        return self.status_int

    @property
    def _charset(self):
        return self.charset

    @property
    def content(self):
        return self.body

    @property
    def client(self):
        client = Client()
        client.cookies = SimpleCookie()
        for k,v in self.test_app.cookies.items():
            client.cookies[k] = v
        return client

    def __getitem__(self, item):
        item = item.lower()
        if item == 'location':
            # django's test response returns location as http://testserver/,
            # WebTest returns it as http://localhost:80/
            e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location)
            if e_netloc == 'localhost:80':
                e_netloc = 'testserver'
            return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment])
        for header, value in self.headerlist:
            if header.lower() == item:
                return value
        raise KeyError(item)

## Instruction:
Add url property to DjangoWebtestResponse so assertRedirects works in 1.6.

## Code After:
from django.test import Client
from django.http import SimpleCookie

from webtest import TestResponse

from django_webtest.compat import urlparse


class DjangoWebtestResponse(TestResponse):
    """
    WebOb's Response quacking more like django's HttpResponse.

    This is here to make more django's TestCase asserts work,
    not to provide a generally useful proxy.
    """
    streaming = False

    @property
    def status_code(self):
        return self.status_int

    @property
    def _charset(self):
        return self.charset

    @property
    def content(self):
        return self.body

    @property
    def url(self):
        return self['location']

    @property
    def client(self):
        client = Client()
        client.cookies = SimpleCookie()
        for k,v in self.test_app.cookies.items():
            client.cookies[k] = v
        return client

    def __getitem__(self, item):
        item = item.lower()
        if item == 'location':
            # django's test response returns location as http://testserver/,
            # WebTest returns it as http://localhost:80/
            e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location)
            if e_netloc == 'localhost:80':
                e_netloc = 'testserver'
            return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment])
        for header, value in self.headerlist:
            if header.lower() == item:
                return value
        raise KeyError(item)
  from django.test import Client
  from django.http import SimpleCookie

  from webtest import TestResponse

  from django_webtest.compat import urlparse


  class DjangoWebtestResponse(TestResponse):
      """
      WebOb's Response quacking more like django's HttpResponse.

      This is here to make more django's TestCase asserts work,
      not to provide a generally useful proxy.
      """
      streaming = False

      @property
      def status_code(self):
          return self.status_int

      @property
      def _charset(self):
          return self.charset

      @property
      def content(self):
          return self.body

      @property
+     def url(self):
+         return self['location']
+
+     @property
      def client(self):
          client = Client()
          client.cookies = SimpleCookie()
          for k,v in self.test_app.cookies.items():
              client.cookies[k] = v
          return client

      def __getitem__(self, item):
          item = item.lower()
          if item == 'location':
              # django's test response returns location as http://testserver/,
              # WebTest returns it as http://localhost:80/
              e_scheme, e_netloc, e_path, e_query, e_fragment = urlparse.urlsplit(self.location)
              if e_netloc == 'localhost:80':
                  e_netloc = 'testserver'
              return urlparse.urlunsplit([e_scheme, e_netloc, e_path, e_query, e_fragment])
          for header, value in self.headerlist:
              if header.lower() == item:
                  return value
          raise KeyError(item)
1aa121daa3c99849173d5cd4c6a80d6bf94f5186
saleor/attribute/__init__.py
saleor/attribute/__init__.py
class AttributeInputType:
    """The type that we expect to render the attribute's values as."""

    DROPDOWN = "dropdown"
    MULTISELECT = "multiselect"
    FILE = "file"
    REFERENCE = "reference"

    CHOICES = [
        (DROPDOWN, "Dropdown"),
        (MULTISELECT, "Multi Select"),
        (FILE, "File"),
        (REFERENCE, "Reference"),
    ]
    # list of the input types that can be used in variant selection
    ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


class AttributeType:
    PRODUCT_TYPE = "product-type"
    PAGE_TYPE = "page-type"

    CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


class AttributeEntityType:
    """Type of a reference entity type. Must match the name of the graphql type."""

    PAGE = "Page"
    PRODUCT = "Product"

    CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
class AttributeInputType:
    """The type that we expect to render the attribute's values as."""

    DROPDOWN = "dropdown"
    MULTISELECT = "multiselect"
    FILE = "file"
    REFERENCE = "reference"

    CHOICES = [
        (DROPDOWN, "Dropdown"),
        (MULTISELECT, "Multi Select"),
        (FILE, "File"),
        (REFERENCE, "Reference"),
    ]
    # list of the input types that can be used in variant selection
    ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


class AttributeType:
    PRODUCT_TYPE = "product-type"
    PAGE_TYPE = "page-type"

    CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


class AttributeEntityType:
    """Type of a reference entity type. Must match the name of the graphql type.

    After adding new value, `REFERENCE_VALUE_NAME_MAPPING`
    and `ENTITY_TYPE_TO_MODEL_MAPPING` in saleor/graphql/attribute/utils.py
    must be updated.
    """

    PAGE = "Page"
    PRODUCT = "Product"

    CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
Add info about required updates in AttributeEntityType
Add info about required updates in AttributeEntityType
Python
bsd-3-clause
mociepka/saleor,mociepka/saleor,mociepka/saleor
  class AttributeInputType:
      """The type that we expect to render the attribute's values as."""

      DROPDOWN = "dropdown"
      MULTISELECT = "multiselect"
      FILE = "file"
      REFERENCE = "reference"

      CHOICES = [
          (DROPDOWN, "Dropdown"),
          (MULTISELECT, "Multi Select"),
          (FILE, "File"),
          (REFERENCE, "Reference"),
      ]
      # list of the input types that can be used in variant selection
      ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


  class AttributeType:
      PRODUCT_TYPE = "product-type"
      PAGE_TYPE = "page-type"

      CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


  class AttributeEntityType:
-     """Type of a reference entity type. Must match the name of the graphql type."""
+     """Type of a reference entity type. Must match the name of the graphql type.
+
+     After adding new value, `REFERENCE_VALUE_NAME_MAPPING`
+     and `ENTITY_TYPE_TO_MODEL_MAPPING` in saleor/graphql/attribute/utils.py
+     must be updated.
+     """

      PAGE = "Page"
      PRODUCT = "Product"

      CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
Add info about required updates in AttributeEntityType
## Code Before:
class AttributeInputType:
    """The type that we expect to render the attribute's values as."""

    DROPDOWN = "dropdown"
    MULTISELECT = "multiselect"
    FILE = "file"
    REFERENCE = "reference"

    CHOICES = [
        (DROPDOWN, "Dropdown"),
        (MULTISELECT, "Multi Select"),
        (FILE, "File"),
        (REFERENCE, "Reference"),
    ]
    # list of the input types that can be used in variant selection
    ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


class AttributeType:
    PRODUCT_TYPE = "product-type"
    PAGE_TYPE = "page-type"

    CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


class AttributeEntityType:
    """Type of a reference entity type. Must match the name of the graphql type."""

    PAGE = "Page"
    PRODUCT = "Product"

    CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]

## Instruction:
Add info about required updates in AttributeEntityType

## Code After:
class AttributeInputType:
    """The type that we expect to render the attribute's values as."""

    DROPDOWN = "dropdown"
    MULTISELECT = "multiselect"
    FILE = "file"
    REFERENCE = "reference"

    CHOICES = [
        (DROPDOWN, "Dropdown"),
        (MULTISELECT, "Multi Select"),
        (FILE, "File"),
        (REFERENCE, "Reference"),
    ]
    # list of the input types that can be used in variant selection
    ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


class AttributeType:
    PRODUCT_TYPE = "product-type"
    PAGE_TYPE = "page-type"

    CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


class AttributeEntityType:
    """Type of a reference entity type. Must match the name of the graphql type.

    After adding new value, `REFERENCE_VALUE_NAME_MAPPING`
    and `ENTITY_TYPE_TO_MODEL_MAPPING` in saleor/graphql/attribute/utils.py
    must be updated.
    """

    PAGE = "Page"
    PRODUCT = "Product"

    CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
  class AttributeInputType:
      """The type that we expect to render the attribute's values as."""

      DROPDOWN = "dropdown"
      MULTISELECT = "multiselect"
      FILE = "file"
      REFERENCE = "reference"

      CHOICES = [
          (DROPDOWN, "Dropdown"),
          (MULTISELECT, "Multi Select"),
          (FILE, "File"),
          (REFERENCE, "Reference"),
      ]
      # list of the input types that can be used in variant selection
      ALLOWED_IN_VARIANT_SELECTION = [DROPDOWN]


  class AttributeType:
      PRODUCT_TYPE = "product-type"
      PAGE_TYPE = "page-type"

      CHOICES = [(PRODUCT_TYPE, "Product type"), (PAGE_TYPE, "Page type")]


  class AttributeEntityType:
-     """Type of a reference entity type. Must match the name of the graphql type."""
?                                                                                 ---
+     """Type of a reference entity type. Must match the name of the graphql type.
+
+     After adding new value, `REFERENCE_VALUE_NAME_MAPPING`
+     and `ENTITY_TYPE_TO_MODEL_MAPPING` in saleor/graphql/attribute/utils.py
+     must be updated.
+     """

      PAGE = "Page"
      PRODUCT = "Product"

      CHOICES = [(PAGE, "Page"), (PRODUCT, "Product")]
57444bdd253e428174c7a5475ef205063ac95ef3
lms/djangoapps/heartbeat/views.py
lms/djangoapps/heartbeat/views.py
import json
from datetime import datetime

from django.http import HttpResponse


def heartbeat(request):
    """
    Simple view that a loadbalancer can check to verify that the app is up
    """
    output = {
        'date': datetime.now().isoformat()
    }
    return HttpResponse(json.dumps(output, indent=4))
import json
from datetime import datetime

from django.http import HttpResponse

from xmodule.modulestore.django import modulestore


def heartbeat(request):
    """
    Simple view that a loadbalancer can check to verify that the app is up
    """
    output = {
        'date': datetime.now().isoformat(),
        'courses': [course.location for course in modulestore().get_courses()],
    }
    return HttpResponse(json.dumps(output, indent=4))
Make heartbeat url wait for courses to be loaded
Make heartbeat url wait for courses to be loaded
Python
agpl-3.0
benpatterson/edx-platform,bigdatauniversity/edx-platform,Softmotions/edx-platform,shashank971/edx-platform,shabab12/edx-platform,ampax/edx-platform,mcgachey/edx-platform,yokose-ks/edx-platform,Livit/Livit.Learn.EdX,DefyVentures/edx-platform,pdehaye/theming-edx-platform,jruiperezv/ANALYSE,carsongee/edx-platform,jjmiranda/edx-platform,sudheerchintala/LearnEraPlatForm,olexiim/edx-platform,shubhdev/edx-platform,beacloudgenius/edx-platform,eestay/edx-platform,beacloudgenius/edx-platform,Edraak/edx-platform,torchingloom/edx-platform,EDUlib/edx-platform,IONISx/edx-platform,alu042/edx-platform,alexthered/kienhoc-platform,zhenzhai/edx-platform,jruiperezv/ANALYSE,dkarakats/edx-platform,inares/edx-platform,hmcmooc/muddx-platform,xingyepei/edx-platform,cyanna/edx-platform,hkawasaki/kawasaki-aio8-0,Stanford-Online/edx-platform,Kalyzee/edx-platform,playm2mboy/edx-platform,atsolakid/edx-platform,cognitiveclass/edx-platform,Livit/Livit.Learn.EdX,prarthitm/edxplatform,procangroup/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,Softmotions/edx-platform,itsjeyd/edx-platform,adoosii/edx-platform,mtlchun/edx,dsajkl/123,chauhanhardik/populo_2,benpatterson/edx-platform,mjirayu/sit_academy,kalebhartje/schoolboost,alu042/edx-platform,chrisndodge/edx-platform,DNFcode/edx-platform,analyseuc3m/ANALYSE-v1,CourseTalk/edx-platform,appliedx/edx-platform,dsajkl/123,mahendra-r/edx-platform,waheedahmed/edx-platform,antoviaque/edx-platform,Stanford-Online/edx-platform,dsajkl/reqiop,vasyarv/edx-platform,xingyepei/edx-platform,torchingloom/edx-platform,bigdatauniversity/edx-platform,cecep-edu/edx-platform,shabab12/edx-platform,MakeHer/edx-platform,rhndg/openedx,mtlchun/edx,zofuthan/edx-platform,shubhdev/edxOnBaadal,Semi-global/edx-platform,zhenzhai/edx-platform,Endika/edx-platform,eestay/edx-platform,nttks/edx-platform,shurihell/testasia,jbzdak/edx-platform,chand3040/cloud_that,ESOedX/edx-platform,LICEF/edx-platform,cpennington/edx-platform,hkawasaki/kawasaki-aio8-2,xinjiguaike/edx-platform,edx-solutions/edx-platform,sameetb-cuelogic/edx-platform-test,MSOpenTech/edx-platform,ak2703/edx-platform,nanolearningllc/edx-platform-cypress,devs1991/test_edx_docmode,ZLLab-Mooc/edx-platform,eduNEXT/edx-platform,waheedahmed/edx-platform,romain-li/edx-platform,vasyarv/edx-platform,PepperPD/edx-pepper-platform,Ayub-Khan/edx-platform,halvertoluke/edx-platform,mitocw/edx-platform,peterm-itr/edx-platform,nttks/jenkins-test,inares/edx-platform,hkawasaki/kawasaki-aio8-2,defance/edx-platform,chauhanhardik/populo_2,shubhdev/edxOnBaadal,kamalx/edx-platform,arbrandes/edx-platform,fintech-circle/edx-platform,polimediaupv/edx-platform,shubhdev/edx-platform,inares/edx-platform,shashank971/edx-platform,TeachAtTUM/edx-platform,kamalx/edx-platform,carsongee/edx-platform,IONISx/edx-platform,iivic/BoiseStateX,OmarIthawi/edx-platform,knehez/edx-platform,IITBinterns13/edx-platform-dev,beacloudgenius/edx-platform,dcosentino/edx-platform,eestay/edx-platform,procangroup/edx-platform,iivic/BoiseStateX,martynovp/edx-platform,jelugbo/tundex,appsembler/edx-platform,ahmedaljazzar/edx-platform,msegado/edx-platform,olexiim/edx-platform,torchingloom/edx-platform,dsajkl/reqiop,alexthered/kienhoc-platform,AkA84/edx-platform,edx/edx-platform,naresh21/synergetics-edx-platform,kmoocdev/edx-platform,adoosii/edx-platform,y12uc231/edx-platform,jbassen/edx-platform,jamesblunt/edx-platform,appliedx/edx-platform,pabloborrego93/edx-platform,dkarakats/edx-platform,chand3040/cloud_that,pabloborrego93/edx-platform,B-MOOC/edx-platform,apigee/edx-platf
orm,jswope00/griffinx,waheedahmed/edx-platform,eemirtekin/edx-platform,gsehub/edx-platform,SivilTaram/edx-platform,UOMx/edx-platform,J861449197/edx-platform,mitocw/edx-platform,hkawasaki/kawasaki-aio8-2,MSOpenTech/edx-platform,y12uc231/edx-platform,arbrandes/edx-platform,Edraak/edraak-platform,chudaol/edx-platform,eemirtekin/edx-platform,motion2015/edx-platform,IndonesiaX/edx-platform,jswope00/griffinx,rationalAgent/edx-platform-custom,shubhdev/edxOnBaadal,xinjiguaike/edx-platform,jonathan-beard/edx-platform,TeachAtTUM/edx-platform,gymnasium/edx-platform,kxliugang/edx-platform,etzhou/edx-platform,tanmaykm/edx-platform,auferack08/edx-platform,andyzsf/edx,vismartltd/edx-platform,chand3040/cloud_that,nagyistoce/edx-platform,valtech-mooc/edx-platform,kursitet/edx-platform,Unow/edx-platform,morenopc/edx-platform,iivic/BoiseStateX,hamzehd/edx-platform,teltek/edx-platform,etzhou/edx-platform,longmen21/edx-platform,openfun/edx-platform,jjmiranda/edx-platform,marcore/edx-platform,philanthropy-u/edx-platform,SivilTaram/edx-platform,nagyistoce/edx-platform,benpatterson/edx-platform,martynovp/edx-platform,PepperPD/edx-pepper-platform,jbzdak/edx-platform,EDUlib/edx-platform,pomegranited/edx-platform,AkA84/edx-platform,caesar2164/edx-platform,ubc/edx-platform,pepeportela/edx-platform,auferack08/edx-platform,pku9104038/edx-platform,stvstnfrd/edx-platform,10clouds/edx-platform,EduPepperPDTesting/pepper2013-testing,ESOedX/edx-platform,jswope00/GAI,zofuthan/edx-platform,antonve/s4-project-mooc,ZLLab-Mooc/edx-platform,xuxiao19910803/edx,10clouds/edx-platform,chauhanhardik/populo,nagyistoce/edx-platform,bdero/edx-platform,EduPepperPD/pepper2013,mtlchun/edx,fintech-circle/edx-platform,kxliugang/edx-platform,unicri/edx-platform,alexthered/kienhoc-platform,arifsetiawan/edx-platform,kalebhartje/schoolboost,ovnicraft/edx-platform,Semi-global/edx-platform,beni55/edx-platform,deepsrijit1105/edx-platform,WatanabeYasumasa/edx-platform,franosincic/edx-platform,torchingloom/edx-platform,bdero/edx-platform,fintech-circle/edx-platform,jbassen/edx-platform,jzoldak/edx-platform,pomegranited/edx-platform,mahendra-r/edx-platform,angelapper/edx-platform,kursitet/edx-platform,ZLLab-Mooc/edx-platform,sameetb-cuelogic/edx-platform-test,morpheby/levelup-by,jamesblunt/edx-platform,y12uc231/edx-platform,shubhdev/edx-platform,yokose-ks/edx-platform,pdehaye/theming-edx-platform,morenopc/edx-platform,nanolearning/edx-platform,philanthropy-u/edx-platform,Kalyzee/edx-platform,dsajkl/reqiop,mtlchun/edx,JCBarahona/edX,motion2015/a3,rationalAgent/edx-platform-custom,Endika/edx-platform,zubair-arbi/edx-platform,caesar2164/edx-platform,eduNEXT/edx-platform,SravanthiSinha/edx-platform,kmoocdev2/edx-platform,franosincic/edx-platform,LearnEra/LearnEraPlaftform,antonve/s4-project-mooc,nanolearning/edx-platform,jbzdak/edx-platform,10clouds/edx-platform,wwj718/edx-platform,Kalyzee/edx-platform,jbassen/edx-platform,10clouds/edx-platform,sudheerchintala/LearnEraPlatForm,ahmadio/edx-platform,nanolearningllc/edx-platform-cypress,ampax/edx-platform-backup,CredoReference/edx-platform,4eek/edx-platform,chand3040/cloud_that,hkawasaki/kawasaki-aio8-1,iivic/BoiseStateX,don-github/edx-platform,cpennington/edx-platform,valtech-mooc/edx-platform,wwj718/edx-platform,polimediaupv/edx-platform,kamalx/edx-platform,xinjiguaike/edx-platform,rue89-tech/edx-platform,zerobatu/edx-platform,bitifirefly/edx-platform,shubhdev/openedx,lduarte1991/edx-platform,nanolearningllc/edx-platform-cypress-2,ahmedaljazzar/edx-platform,jazkarta/edx-platform-for-isc,vasyarv/edx-platform,s
yjeon/new_edx,nikolas/edx-platform,marcore/edx-platform,longmen21/edx-platform,a-parhom/edx-platform,ovnicraft/edx-platform,sameetb-cuelogic/edx-platform-test,cselis86/edx-platform,ovnicraft/edx-platform,nanolearning/edx-platform,shurihell/testasia,arifsetiawan/edx-platform,DefyVentures/edx-platform,nttks/edx-platform,EduPepperPD/pepper2013,louyihua/edx-platform,atsolakid/edx-platform,xingyepei/edx-platform,CourseTalk/edx-platform,ubc/edx-platform,jazztpt/edx-platform,shashank971/edx-platform,OmarIthawi/edx-platform,franosincic/edx-platform,raccoongang/edx-platform,jazkarta/edx-platform,kxliugang/edx-platform,MSOpenTech/edx-platform,nanolearningllc/edx-platform-cypress,msegado/edx-platform,Livit/Livit.Learn.EdX,hastexo/edx-platform,peterm-itr/edx-platform,ahmadio/edx-platform,nanolearningllc/edx-platform-cypress-2,amir-qayyum-khan/edx-platform,SivilTaram/edx-platform,mushtaqak/edx-platform,zadgroup/edx-platform,edx/edx-platform,pepeportela/edx-platform,ahmadiga/min_edx,fly19890211/edx-platform,jruiperezv/ANALYSE,kmoocdev/edx-platform,RPI-OPENEDX/edx-platform,WatanabeYasumasa/edx-platform,zadgroup/edx-platform,etzhou/edx-platform,jruiperezv/ANALYSE,shubhdev/edx-platform,openfun/edx-platform,cselis86/edx-platform,Edraak/circleci-edx-platform,abdoosh00/edx-rtl-final,eduNEXT/edx-platform,dkarakats/edx-platform,zubair-arbi/edx-platform,romain-li/edx-platform,iivic/BoiseStateX,gsehub/edx-platform,morpheby/levelup-by,nttks/jenkins-test,leansoft/edx-platform,antoviaque/edx-platform,Edraak/edraak-platform,unicri/edx-platform,jelugbo/tundex,edry/edx-platform,abdoosh00/edraak,Stanford-Online/edx-platform,itsjeyd/edx-platform,shubhdev/openedx,rismalrv/edx-platform,motion2015/edx-platform,yokose-ks/edx-platform,utecuy/edx-platform,ferabra/edx-platform,nikolas/edx-platform,romain-li/edx-platform,Edraak/edraak-platform,a-parhom/edx-platform,mahendra-r/edx-platform,amir-qayyum-khan/edx-platform,abdoosh00/edraak,jswope00/GAI,DNFcode/edx-platform,beni55/edx-platform,dcosentino/edx-platform,atsolakid/edx-platform,naresh21/synergetics-edx-platform,4eek/edx-platform,UOMx/edx-platform,xuxiao19910803/edx-platform,chauhanhardik/populo,longmen21/edx-platform,shubhdev/openedx,edx/edx-platform,pelikanchik/edx-platform,yokose-ks/edx-platform,SravanthiSinha/edx-platform,apigee/edx-platform,alu042/edx-platform,Lektorium-LLC/edx-platform,fly19890211/edx-platform,xuxiao19910803/edx,pelikanchik/edx-platform,carsongee/edx-platform,fintech-circle/edx-platform,jolyonb/edx-platform,Semi-global/edx-platform,apigee/edx-platform,mcgachey/edx-platform,jamesblunt/edx-platform,antonve/s4-project-mooc,doismellburning/edx-platform,Ayub-Khan/edx-platform,Edraak/circleci-edx-platform,xuxiao19910803/edx-platform,zofuthan/edx-platform,jzoldak/edx-platform,beacloudgenius/edx-platform,motion2015/edx-platform,appliedx/edx-platform,olexiim/edx-platform,a-parhom/edx-platform,cognitiveclass/edx-platform,olexiim/edx-platform,valtech-mooc/edx-platform,Livit/Livit.Learn.EdX,IndonesiaX/edx-platform,dkarakats/edx-platform,morpheby/levelup-by,dsajkl/123,martynovp/edx-platform,ampax/edx-platform-backup,tanmaykm/edx-platform,angelapper/edx-platform,jazkarta/edx-platform,SivilTaram/edx-platform,pelikanchik/edx-platform,IONISx/edx-platform,edx-solutions/edx-platform,J861449197/edx-platform,chrisndodge/edx-platform,Softmotions/edx-platform,openfun/edx-platform,zerobatu/edx-platform,atsolakid/edx-platform,atsolakid/edx-platform,mjirayu/sit_academy,rue89-tech/edx-platform,appliedx/edx-platform,ak2703/edx-platform,gsehub/edx-platform,ahmadiga/min_edx,don-git
hub/edx-platform,shubhdev/edx-platform,Edraak/edx-platform,pku9104038/edx-platform,prarthitm/edxplatform,solashirai/edx-platform,mbareta/edx-platform-ft,xinjiguaike/edx-platform,BehavioralInsightsTeam/edx-platform,Softmotions/edx-platform,devs1991/test_edx_docmode,shabab12/edx-platform,TeachAtTUM/edx-platform,mjg2203/edx-platform-seas,peterm-itr/edx-platform,synergeticsedx/deployment-wipro,inares/edx-platform,lduarte1991/edx-platform,motion2015/a3,xuxiao19910803/edx-platform,Stanford-Online/edx-platform,doganov/edx-platform,appsembler/edx-platform,philanthropy-u/edx-platform,nanolearningllc/edx-platform-cypress-2,morenopc/edx-platform,carsongee/edx-platform,ferabra/edx-platform,andyzsf/edx,appsembler/edx-platform,Shrhawk/edx-platform,teltek/edx-platform,halvertoluke/edx-platform,eemirtekin/edx-platform,etzhou/edx-platform,tanmaykm/edx-platform,B-MOOC/edx-platform,xingyepei/edx-platform,beacloudgenius/edx-platform,jolyonb/edx-platform,jelugbo/tundex,mushtaqak/edx-platform,jazkarta/edx-platform,pku9104038/edx-platform,Edraak/edx-platform,ahmedaljazzar/edx-platform,appsembler/edx-platform,jswope00/griffinx,playm2mboy/edx-platform,ubc/edx-platform,kxliugang/edx-platform,y12uc231/edx-platform,JCBarahona/edX,RPI-OPENEDX/edx-platform,devs1991/test_edx_docmode,IITBinterns13/edx-platform-dev,adoosii/edx-platform,polimediaupv/edx-platform,UXE/local-edx,mahendra-r/edx-platform,SravanthiSinha/edx-platform,analyseuc3m/ANALYSE-v1,hmcmooc/muddx-platform,auferack08/edx-platform,jbzdak/edx-platform,kursitet/edx-platform,Endika/edx-platform,rismalrv/edx-platform,EduPepperPDTesting/pepper2013-testing,devs1991/test_edx_docmode,hamzehd/edx-platform,sudheerchintala/LearnEraPlatForm,dcosentino/edx-platform,ahmadiga/min_edx,kalebhartje/schoolboost,wwj718/ANALYSE,deepsrijit1105/edx-platform,syjeon/new_edx,procangroup/edx-platform,Unow/edx-platform,nikolas/edx-platform,J861449197/edx-platform,ahmedaljazzar/edx-platform,cognitiveclass/edx-platform,prarthitm/edxplatform,syjeon/new_edx,louyihua/edx-platform,defance/edx-platform,EduPepperPDTesting/pepper2013-testing,mjg2203/edx-platform-seas,MSOpenTech/edx-platform,Ayub-Khan/edx-platform,Edraak/edx-platform,devs1991/test_edx_docmode,edx-solutions/edx-platform,peterm-itr/edx-platform,vismartltd/edx-platform,vasyarv/edx-platform,ferabra/edx-platform,Shrhawk/edx-platform,openfun/edx-platform,pomegranited/edx-platform,ahmadio/edx-platform,eduNEXT/edunext-platform,UXE/local-edx,Lektorium-LLC/edx-platform,Edraak/circleci-edx-platform,don-github/edx-platform,simbs/edx-platform,chauhanhardik/populo,hmcmooc/muddx-platform,zubair-arbi/edx-platform,bitifirefly/edx-platform,DNFcode/edx-platform,Kalyzee/edx-platform,fly19890211/edx-platform,pdehaye/theming-edx-platform,shubhdev/edxOnBaadal,doganov/edx-platform,doismellburning/edx-platform,EDUlib/edx-platform,antonve/s4-project-mooc,rhndg/openedx,pku9104038/edx-platform,inares/edx-platform,philanthropy-u/edx-platform,deepsrijit1105/edx-platform,bdero/edx-platform,edry/edx-platform,prarthitm/edxplatform,jswope00/GAI,nanolearning/edx-platform,mjg2203/edx-platform-seas,nttks/jenkins-test,teltek/edx-platform,jamiefolsom/edx-platform,Unow/edx-platform,caesar2164/edx-platform,cecep-edu/edx-platform,solashirai/edx-platform,procangroup/edx-platform,gymnasium/edx-platform,devs1991/test_edx_docmode,polimediaupv/edx-platform,don-github/edx-platform,tiagochiavericosta/edx-platform,gymnasium/edx-platform,shubhdev/edxOnBaadal,nikolas/edx-platform,bigdatauniversity/edx-platform,vikas1885/test1,mjirayu/sit_academy,wwj718/edx-platform,utecuy/edx-plat
form,doismellburning/edx-platform,UXE/local-edx,vismartltd/edx-platform,Shrhawk/edx-platform,xuxiao19910803/edx,zhenzhai/edx-platform,rhndg/openedx,IndonesiaX/edx-platform,praveen-pal/edx-platform,MSOpenTech/edx-platform,kxliugang/edx-platform,WatanabeYasumasa/edx-platform,utecuy/edx-platform,jonathan-beard/edx-platform,vasyarv/edx-platform,simbs/edx-platform,alexthered/kienhoc-platform,EduPepperPD/pepper2013,chauhanhardik/populo,morenopc/edx-platform,J861449197/edx-platform,SravanthiSinha/edx-platform,hkawasaki/kawasaki-aio8-1,OmarIthawi/edx-platform,vikas1885/test1,devs1991/test_edx_docmode,eduNEXT/edunext-platform,jamiefolsom/edx-platform,amir-qayyum-khan/edx-platform,rue89-tech/edx-platform,waheedahmed/edx-platform,Endika/edx-platform,utecuy/edx-platform,raccoongang/edx-platform,beni55/edx-platform,jamiefolsom/edx-platform,gymnasium/edx-platform,AkA84/edx-platform,synergeticsedx/deployment-wipro,shashank971/edx-platform,ampax/edx-platform-backup,jazztpt/edx-platform,cognitiveclass/edx-platform,itsjeyd/edx-platform,bigdatauniversity/edx-platform,playm2mboy/edx-platform,shubhdev/openedx,jazkarta/edx-platform,eemirtekin/edx-platform,rue89-tech/edx-platform,4eek/edx-platform,pabloborrego93/edx-platform,olexiim/edx-platform,hamzehd/edx-platform,chauhanhardik/populo_2,louyihua/edx-platform,jjmiranda/edx-platform,mtlchun/edx,abdoosh00/edraak,IONISx/edx-platform,playm2mboy/edx-platform,jonathan-beard/edx-platform,kmoocdev2/edx-platform,pepeportela/edx-platform,benpatterson/edx-platform,vismartltd/edx-platform,rationalAgent/edx-platform-custom,yokose-ks/edx-platform,adoosii/edx-platform,jamesblunt/edx-platform,mcgachey/edx-platform,JioEducation/edx-platform,Edraak/edraak-platform,utecuy/edx-platform,martynovp/edx-platform,mushtaqak/edx-platform,jazkarta/edx-platform-for-isc,kursitet/edx-platform,cselis86/edx-platform,franosincic/edx-platform,franosincic/edx-platform,J861449197/edx-platform,PepperPD/edx-pepper-platform,jazztpt/edx-platform,bdero/edx-platform,hamzehd/edx-platform,pelikanchik/edx-platform,jazkarta/edx-platform,kmoocdev/edx-platform,chand3040/cloud_that,morenopc/edx-platform,edry/edx-platform,cecep-edu/edx-platform,LearnEra/LearnEraPlaftform,Unow/edx-platform,openfun/edx-platform,edry/edx-platform,LICEF/edx-platform,unicri/edx-platform,ESOedX/edx-platform,jelugbo/tundex,ubc/edx-platform,wwj718/ANALYSE,appliedx/edx-platform,cselis86/edx-platform,ak2703/edx-platform,MakeHer/edx-platform,alexthered/kienhoc-platform,rismalrv/edx-platform,cognitiveclass/edx-platform,halvertoluke/edx-platform,wwj718/edx-platform,teltek/edx-platform,zerobatu/edx-platform,DNFcode/edx-platform,shurihell/testasia,hkawasaki/kawasaki-aio8-0,andyzsf/edx,nanolearningllc/edx-platform-cypress,ovnicraft/edx-platform,ahmadiga/min_edx,nagyistoce/edx-platform,kmoocdev2/edx-platform,motion2015/a3,sudheerchintala/LearnEraPlatForm,EDUlib/edx-platform,Semi-global/edx-platform,eestay/edx-platform,hkawasaki/kawasaki-aio8-2,arifsetiawan/edx-platform,longmen21/edx-platform,defance/edx-platform,RPI-OPENEDX/edx-platform,morpheby/levelup-by,ferabra/edx-platform,valtech-mooc/edx-platform,leansoft/edx-platform,arifsetiawan/edx-platform,knehez/edx-platform,rue89-tech/edx-platform,DefyVentures/edx-platform,B-MOOC/edx-platform,cecep-edu/edx-platform,synergeticsedx/deployment-wipro,mushtaqak/edx-platform,IndonesiaX/edx-platform,xuxiao19910803/edx,analyseuc3m/ANALYSE-v1,rhndg/openedx,nanolearningllc/edx-platform-cypress,eestay/edx-platform,chauhanhardik/populo_2,playm2mboy/edx-platform,miptliot/edx-platform,adoosii/edx-platform,nttks/ed
x-platform,cpennington/edx-platform,tiagochiavericosta/edx-platform,abdoosh00/edx-rtl-final,rismalrv/edx-platform,4eek/edx-platform,wwj718/ANALYSE,JioEducation/edx-platform,kamalx/edx-platform,jazztpt/edx-platform,proversity-org/edx-platform,BehavioralInsightsTeam/edx-platform,motion2015/edx-platform,apigee/edx-platform,caesar2164/edx-platform,chauhanhardik/populo_2,jonathan-beard/edx-platform,WatanabeYasumasa/edx-platform,MakeHer/edx-platform,nttks/jenkins-test,B-MOOC/edx-platform,MakeHer/edx-platform,pabloborrego93/edx-platform,bigdatauniversity/edx-platform,mjg2203/edx-platform-seas,dkarakats/edx-platform,mitocw/edx-platform,nttks/jenkins-test,abdoosh00/edx-rtl-final,alu042/edx-platform,eduNEXT/edunext-platform,SivilTaram/edx-platform,ampax/edx-platform-backup,RPI-OPENEDX/edx-platform,chudaol/edx-platform,cselis86/edx-platform,benpatterson/edx-platform,halvertoluke/edx-platform,ampax/edx-platform,rationalAgent/edx-platform-custom,amir-qayyum-khan/edx-platform,arbrandes/edx-platform,dsajkl/123,ahmadiga/min_edx,doismellburning/edx-platform,AkA84/edx-platform,devs1991/test_edx_docmode,AkA84/edx-platform,LICEF/edx-platform,dcosentino/edx-platform,rismalrv/edx-platform,xingyepei/edx-platform,miptliot/edx-platform,chudaol/edx-platform,itsjeyd/edx-platform,solashirai/edx-platform,motion2015/edx-platform,pomegranited/edx-platform,vikas1885/test1,RPI-OPENEDX/edx-platform,mahendra-r/edx-platform,jswope00/griffinx,doismellburning/edx-platform,IITBinterns13/edx-platform-dev,praveen-pal/edx-platform,chudaol/edx-platform,cpennington/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,ahmadio/edx-platform,vikas1885/test1,nanolearning/edx-platform,vikas1885/test1,knehez/edx-platform,zerobatu/edx-platform,IndonesiaX/edx-platform,romain-li/edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-0,UOMx/edx-platform,naresh21/synergetics-edx-platform,UOMx/edx-platform,cyanna/edx-platform,jruiperezv/ANALYSE,bitifirefly/edx-platform,jelugbo/tundex,hkawasaki/kawasaki-aio8-1,leansoft/edx-platform,solashirai/edx-platform,nikolas/edx-platform,Softmotions/edx-platform,OmarIthawi/edx-platform,ampax/edx-platform,shurihell/testasia,proversity-org/edx-platform,cyanna/edx-platform,jazkarta/edx-platform-for-isc,kmoocdev/edx-platform,edx/edx-platform,eemirtekin/edx-platform,Shrhawk/edx-platform,hastexo/edx-platform,ampax/edx-platform-backup,PepperPD/edx-pepper-platform,unicri/edx-platform,zadgroup/edx-platform,analyseuc3m/ANALYSE-v1,eduNEXT/edx-platform,mitocw/edx-platform,JCBarahona/edX,UXE/local-edx,kalebhartje/schoolboost,abdoosh00/edx-rtl-final,msegado/edx-platform,leansoft/edx-platform,antonve/s4-project-mooc,hastexo/edx-platform,zubair-arbi/edx-platform,praveen-pal/edx-platform,ahmadio/edx-platform,Lektorium-LLC/edx-platform,arifsetiawan/edx-platform,ZLLab-Mooc/edx-platform,chudaol/edx-platform,dcosentino/edx-platform,JCBarahona/edX,JCBarahona/edX,fly19890211/edx-platform,SravanthiSinha/edx-platform,TsinghuaX/edx-platform,jbassen/edx-platform,marcore/edx-platform,jolyonb/edx-platform,LearnEra/LearnEraPlaftform,edry/edx-platform,ampax/edx-platform,jolyonb/edx-platform,edx-solutions/edx-platform,msegado/edx-platform,lduarte1991/edx-platform,solashirai/edx-platform,deepsrijit1105/edx-platform,bitifirefly/edx-platform,knehez/edx-platform,eduNEXT/edunext-platform,BehavioralInsightsTeam/edx-platform,beni55/edx-platform,xuxiao19910803/edx-platform,LICEF/edx-platform,LICEF/edx-platform,cyanna/edx-platform,Semi-global/edx-platform,miptliot/edx-platform,zubair-arbi/edx-platform,wwj718/edx-platform,DNFcode/edx-plat
form,a-parhom/edx-platform,simbs/edx-platform,miptliot/edx-platform,halvertoluke/edx-platform,DefyVentures/edx-platform,zofuthan/edx-platform,dsajkl/reqiop,proversity-org/edx-platform,jazztpt/edx-platform,wwj718/ANALYSE,TsinghuaX/edx-platform,chauhanhardik/populo,hkawasaki/kawasaki-aio8-1,kmoocdev2/edx-platform,ak2703/edx-platform,Ayub-Khan/edx-platform,tiagochiavericosta/edx-platform,B-MOOC/edx-platform,nanolearningllc/edx-platform-cypress-2,defance/edx-platform,xinjiguaike/edx-platform,martynovp/edx-platform,jonathan-beard/edx-platform,BehavioralInsightsTeam/edx-platform,abdoosh00/edraak,mbareta/edx-platform-ft,mbareta/edx-platform-ft,tiagochiavericosta/edx-platform,valtech-mooc/edx-platform,CourseTalk/edx-platform,JioEducation/edx-platform,praveen-pal/edx-platform,zerobatu/edx-platform,antoviaque/edx-platform,mushtaqak/edx-platform,jzoldak/edx-platform,Ayub-Khan/edx-platform,pepeportela/edx-platform,EduPepperPD/pepper2013,CredoReference/edx-platform,rationalAgent/edx-platform-custom,JioEducation/edx-platform,louyihua/edx-platform,mjirayu/sit_academy,don-github/edx-platform,TeachAtTUM/edx-platform,gsehub/edx-platform,kamalx/edx-platform,andyzsf/edx,ferabra/edx-platform,4eek/edx-platform,shurihell/testasia,ovnicraft/edx-platform,auferack08/edx-platform,cyanna/edx-platform,nanolearningllc/edx-platform-cypress-2,xuxiao19910803/edx,stvstnfrd/edx-platform,bitifirefly/edx-platform,ak2703/edx-platform,Edraak/circleci-edx-platform,antoviaque/edx-platform,raccoongang/edx-platform,mcgachey/edx-platform,zhenzhai/edx-platform,nagyistoce/edx-platform,motion2015/a3,xuxiao19910803/edx-platform,stvstnfrd/edx-platform,tanmaykm/edx-platform,MakeHer/edx-platform,jbzdak/edx-platform,longmen21/edx-platform,kmoocdev/edx-platform,EduPepperPDTesting/pepper2013-testing,simbs/edx-platform,msegado/edx-platform,mcgachey/edx-platform,TsinghuaX/edx-platform,mbareta/edx-platform-ft,jjmiranda/edx-platform,chrisndodge/edx-platform,beni55/edx-platform,tiagochiavericosta/edx-platform,kmoocdev2/edx-platform,stvstnfrd/edx-platform,IITBinterns13/edx-platform-dev,shubhdev/openedx,leansoft/edx-platform,fly19890211/edx-platform,zadgroup/edx-platform,unicri/edx-platform,Shrhawk/edx-platform,kalebhartje/schoolboost,vismartltd/edx-platform,cecep-edu/edx-platform,IONISx/edx-platform,jamesblunt/edx-platform,doganov/edx-platform,jamiefolsom/edx-platform,jswope00/GAI,marcore/edx-platform,kursitet/edx-platform,doganov/edx-platform,CredoReference/edx-platform,Lektorium-LLC/edx-platform,LearnEra/LearnEraPlaftform,waheedahmed/edx-platform,polimediaupv/edx-platform,naresh21/synergetics-edx-platform,DefyVentures/edx-platform,romain-li/edx-platform,chrisndodge/edx-platform,dsajkl/123,mjirayu/sit_academy,zhenzhai/edx-platform,CredoReference/edx-platform,TsinghuaX/edx-platform,ZLLab-Mooc/edx-platform,y12uc231/edx-platform,wwj718/ANALYSE,pdehaye/theming-edx-platform,jbassen/edx-platform,nttks/edx-platform,PepperPD/edx-pepper-platform,motion2015/a3,knehez/edx-platform,jswope00/griffinx,jamiefolsom/edx-platform,rhndg/openedx,raccoongang/edx-platform,etzhou/edx-platform,doganov/edx-platform,sameetb-cuelogic/edx-platform-test,lduarte1991/edx-platform,jazkarta/edx-platform-for-isc,ubc/edx-platform,hmcmooc/muddx-platform,hastexo/edx-platform,syjeon/new_edx,ESOedX/edx-platform,nttks/edx-platform,zadgroup/edx-platform,CourseTalk/edx-platform,shabab12/edx-platform,simbs/edx-platform,zofuthan/edx-platform,sameetb-cuelogic/edx-platform-test,pomegranited/edx-platform,hkawasaki/kawasaki-aio8-0,jazkarta/edx-platform-for-isc,shashank971/edx-platform,Kalyzee/ed
x-platform,EduPepperPD/pepper2013,torchingloom/edx-platform,jzoldak/edx-platform,proversity-org/edx-platform,synergeticsedx/deployment-wipro,EduPepperPDTesting/pepper2013-testing,EduPepperPDTesting/pepper2013-testing
  import json
  from datetime import datetime
  
  from django.http import HttpResponse
+ from xmodule.modulestore.django import modulestore
  
  
  def heartbeat(request):
      """
      Simple view that a loadbalancer can check to verify that the app is up
      """
      output = {
-         'date': datetime.now().isoformat()
+         'date': datetime.now().isoformat(),
+         'courses': [course.location for course in modulestore().get_courses()],
      }
      return HttpResponse(json.dumps(output, indent=4))
Make heartbeat url wait for courses to be loaded
## Code Before:
import json
from datetime import datetime

from django.http import HttpResponse


def heartbeat(request):
    """
    Simple view that a loadbalancer can check to verify that the app is up
    """
    output = {
        'date': datetime.now().isoformat()
    }
    return HttpResponse(json.dumps(output, indent=4))

## Instruction:
Make heartbeat url wait for courses to be loaded

## Code After:
import json
from datetime import datetime

from django.http import HttpResponse
from xmodule.modulestore.django import modulestore


def heartbeat(request):
    """
    Simple view that a loadbalancer can check to verify that the app is up
    """
    output = {
        'date': datetime.now().isoformat(),
        'courses': [course.location for course in modulestore().get_courses()],
    }
    return HttpResponse(json.dumps(output, indent=4))

  import json
  from datetime import datetime
  
  from django.http import HttpResponse
+ from xmodule.modulestore.django import modulestore
  
  
  def heartbeat(request):
      """
      Simple view that a loadbalancer can check to verify that the app is up
      """
      output = {
-         'date': datetime.now().isoformat()
+         'date': datetime.now().isoformat(),
?                                           +
+         'courses': [course.location for course in modulestore().get_courses()],
      }
      return HttpResponse(json.dumps(output, indent=4))
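A note on the heartbeat record above: enumerating every course location forces the modulestore to finish loading before the load balancer marks the instance healthy, which is the point of the commit. The sketch below is a variant rather than the project's code: it assumes the edx-platform modulestore API shown in the record, and the str() cast is an addition so the JSON output does not depend on how Location objects serialize.

import json
from datetime import datetime

from django.http import HttpResponse
from xmodule.modulestore.django import modulestore


def heartbeat(request):
    """Load-balancer health check that also proves courses are loaded."""
    # get_courses() only returns once the modulestore has its courses, so a
    # successful response means more than 'the process is running'.
    courses = modulestore().get_courses()
    output = {
        'date': datetime.now().isoformat(),
        # Casting to str keeps the payload plain text regardless of how
        # Location objects behave under json.dumps.
        'courses': [str(course.location) for course in courses],
    }
    return HttpResponse(json.dumps(output, indent=4))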
10bfa701f352e0f916b1edd9913bee788f09568f
oscar/apps/catalogue/managers.py
oscar/apps/catalogue/managers.py
from django.db import models


class ProductManager(models.Manager):

    def base_queryset(self):
        """
        Return ``QuerySet`` with related content pre-loaded.
        """
        return self.get_query_set().select_related('product_class')\
            .prefetch_related('variants',
                              'product_options',
                              'product_class__options',
                              'stockrecords',
                              'images',
                              ).all()


class BrowsableProductManager(ProductManager):
    """
    Excludes non-canonical products
    """

    def get_query_set(self):
        return super(BrowsableProductManager, self).get_query_set().filter(
            parent=None)

from django.db import models


class ProductQuerySet(models.query.QuerySet):

    def base_queryset(self):
        """
        Applies select_related and prefetch_related for commonly related
        models to save on queries
        """
        return self.select_related('product_class')\
            .prefetch_related('variants',
                              'product_options',
                              'product_class__options',
                              'stockrecords',
                              'images',
                              )

    def browsable(self):
        """
        Excludes non-canonical products.
        """
        return self.filter(parent=None)


class ProductManager(models.Manager):
    """
    Uses ProductQuerySet and proxies its methods to allow chaining

    Once Django 1.7 lands, this class can probably be removed:
    https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager  # noqa
    """

    def get_queryset(self):
        return ProductQuerySet(self.model, using=self._db)

    def browsable(self):
        return self.get_queryset().browsable()

    def base_queryset(self):
        return self.get_queryset().base_queryset()


class BrowsableProductManager(ProductManager):
    """
    Excludes non-canonical products

    Could be deprecated after Oscar 0.7 is released
    """

    def get_queryset(self):
        return super(BrowsableProductManager, self).get_queryset().browsable()
Allow chaining of Product's custom querysets
Allow chaining of Product's custom querysets

This aligns the implementation of Oscar specific QuerySet Methods with
the implementation in current django core[1]. While this is not DRY, it
does deliver on chainability and can be seen as preparation to take
advantage of the improvements coming to this part of django in 1.7 [2]

The original motivation for this was that using a custom QuerySet while
trying not to copy code from oscar felt too hard.

[1] https://github.com/django/django/blob/1.6.2/django/db/models/manager.py#L123
[2] https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager

Fixes #1278.

Deprecation comments added by @maikhoepfel
Python
bsd-3-clause
thechampanurag/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,WadeYuChen/django-oscar,josesanch/django-oscar,vovanbo/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,itbabu/django-oscar,WadeYuChen/django-oscar,anentropic/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,WadeYuChen/django-oscar,josesanch/django-oscar,eddiep1101/django-oscar,marcoantoniooliveira/labweb,michaelkuty/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,john-parton/django-oscar,MatthewWilkes/django-oscar,amirrpp/django-oscar,pasqualguerrero/django-oscar,taedori81/django-oscar,michaelkuty/django-oscar,vovanbo/django-oscar,taedori81/django-oscar,QLGu/django-oscar,monikasulik/django-oscar,django-oscar/django-oscar,saadatqadri/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,manevant/django-oscar,manevant/django-oscar,taedori81/django-oscar,ka7eh/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,mexeniz/django-oscar,eddiep1101/django-oscar,ademuk/django-oscar,rocopartners/django-oscar,nickpack/django-oscar,anentropic/django-oscar,MatthewWilkes/django-oscar,bschuon/django-oscar,DrOctogon/unwash_ecom,jlmadurga/django-oscar,bschuon/django-oscar,QLGu/django-oscar,pasqualguerrero/django-oscar,pdonadeo/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,lijoantony/django-oscar,saadatqadri/django-oscar,manevant/django-oscar,jinnykoo/wuyisj,thechampanurag/django-oscar,john-parton/django-oscar,jinnykoo/wuyisj.com,adamend/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,lijoantony/django-oscar,adamend/django-oscar,jmt4/django-oscar,sasha0/django-oscar,jlmadurga/django-oscar,okfish/django-oscar,josesanch/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,saadatqadri/django-oscar,sasha0/django-oscar,itbabu/django-oscar,mexeniz/django-oscar,anentropic/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,binarydud/django-oscar,machtfit/django-oscar,nickpack/django-oscar,adamend/django-oscar,nickpack/django-oscar,ahmetdaglarbas/e-commerce,kapari/django-oscar,jinnykoo/wuyisj.com,binarydud/django-oscar,spartonia/django-oscar,vovanbo/django-oscar,machtfit/django-oscar,kapt/django-oscar,monikasulik/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,vovanbo/django-oscar,manevant/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,eddiep1101/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,amirrpp/django-oscar,thechampanurag/django-oscar,lijoantony/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj.com,sonofatailor/django-oscar,amirrpp/django-oscar,nickpack/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,jinnykoo/wuyisj.com,pdonadeo/django-oscar,anentropic/django-oscar,Bogh/django-oscar,faratro/django-oscar,nfletton/django-oscar,jmt4/django-oscar,ademuk/django-oscar,jinnykoo/wuyisj,sonofatailor/django-oscar,rocopartners/django-oscar,faratro/django-oscar,jinnykoo/wuyisj,pdonadeo/django-oscar,itbabu/django-oscar,machtfit/django-oscar,MatthewWilkes/django-oscar,john-parton/django-oscar,ademuk/django-oscar,mexeniz/django-oscar,jinnykoo/christmas,solarissmoke/django-oscar,binarydud/django-oscar,taedori81/django-oscar,dongguangming/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,solarissmoke/django-oscar,Jannes123/django-oscar,bschuon/django-oscar,solarissmoke/django-oscar,nfletton/django-oscar,jinnykoo/christmas,thechampanurag/django-oscar,sa
adatqadri/django-oscar,kapt/django-oscar,sonofatailor/django-oscar,okfish/django-oscar,Bogh/django-oscar,spartonia/django-oscar,monikasulik/django-oscar,ka7eh/django-oscar,nfletton/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,sasha0/django-oscar,spartonia/django-oscar,faratro/django-oscar,pasqualguerrero/django-oscar,okfish/django-oscar,DrOctogon/unwash_ecom,django-oscar/django-oscar,ahmetdaglarbas/e-commerce,jinnykoo/christmas,faratro/django-oscar,ka7eh/django-oscar,ahmetdaglarbas/e-commerce,Jannes123/django-oscar,adamend/django-oscar,bnprk/django-oscar,kapari/django-oscar,rocopartners/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,kapt/django-oscar,django-oscar/django-oscar,bnprk/django-oscar,rocopartners/django-oscar,WillisXChen/django-oscar,bschuon/django-oscar,marcoantoniooliveira/labweb,binarydud/django-oscar,eddiep1101/django-oscar,ademuk/django-oscar,okfish/django-oscar,pdonadeo/django-oscar,WadeYuChen/django-oscar
  from django.db import models
  
  
- class ProductManager(models.Manager):
+ class ProductQuerySet(models.query.QuerySet):
  
      def base_queryset(self):
          """
-         Return ``QuerySet`` with related content pre-loaded.
+         Applies select_related and prefetch_related for commonly related
+         models to save on queries
          """
-         return self.get_query_set().select_related('product_class')\
+         return self.select_related('product_class')\
              .prefetch_related('variants',
                                'product_options',
                                'product_class__options',
                                'stockrecords',
                                'images',
-                               ).all()
+                               )
+ 
+     def browsable(self):
+         """
+         Excludes non-canonical products.
+         """
+         return self.filter(parent=None)
+ 
+ 
+ class ProductManager(models.Manager):
+     """
+     Uses ProductQuerySet and proxies its methods to allow chaining
+ 
+     Once Django 1.7 lands, this class can probably be removed:
+     https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager  # noqa
+     """
+ 
+     def get_queryset(self):
+         return ProductQuerySet(self.model, using=self._db)
+ 
+     def browsable(self):
+         return self.get_queryset().browsable()
+ 
+     def base_queryset(self):
+         return self.get_queryset().base_queryset()
  
  
  class BrowsableProductManager(ProductManager):
      """
      Excludes non-canonical products
+ 
+     Could be deprecated after Oscar 0.7 is released
      """
  
-     def get_query_set(self):
+     def get_queryset(self):
-         return super(BrowsableProductManager, self).get_query_set().filter(
+         return super(BrowsableProductManager, self).get_queryset().browsable()
-             parent=None)
Allow chaining of Product's custom querysets
## Code Before:
from django.db import models


class ProductManager(models.Manager):

    def base_queryset(self):
        """
        Return ``QuerySet`` with related content pre-loaded.
        """
        return self.get_query_set().select_related('product_class')\
            .prefetch_related('variants',
                              'product_options',
                              'product_class__options',
                              'stockrecords',
                              'images',
                              ).all()


class BrowsableProductManager(ProductManager):
    """
    Excludes non-canonical products
    """

    def get_query_set(self):
        return super(BrowsableProductManager, self).get_query_set().filter(
            parent=None)

## Instruction:
Allow chaining of Product's custom querysets

## Code After:
from django.db import models


class ProductQuerySet(models.query.QuerySet):

    def base_queryset(self):
        """
        Applies select_related and prefetch_related for commonly related
        models to save on queries
        """
        return self.select_related('product_class')\
            .prefetch_related('variants',
                              'product_options',
                              'product_class__options',
                              'stockrecords',
                              'images',
                              )

    def browsable(self):
        """
        Excludes non-canonical products.
        """
        return self.filter(parent=None)


class ProductManager(models.Manager):
    """
    Uses ProductQuerySet and proxies its methods to allow chaining

    Once Django 1.7 lands, this class can probably be removed:
    https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager  # noqa
    """

    def get_queryset(self):
        return ProductQuerySet(self.model, using=self._db)

    def browsable(self):
        return self.get_queryset().browsable()

    def base_queryset(self):
        return self.get_queryset().base_queryset()


class BrowsableProductManager(ProductManager):
    """
    Excludes non-canonical products

    Could be deprecated after Oscar 0.7 is released
    """

    def get_queryset(self):
        return super(BrowsableProductManager, self).get_queryset().browsable()

  from django.db import models
  
  
- class ProductManager(models.Manager):
+ class ProductQuerySet(models.query.QuerySet):
  
      def base_queryset(self):
          """
-         Return ``QuerySet`` with related content pre-loaded.
+         Applies select_related and prefetch_related for commonly related
+         models to save on queries
          """
-         return self.get_query_set().select_related('product_class')\
?                     ----------------
+         return self.select_related('product_class')\
              .prefetch_related('variants',
                                'product_options',
                                'product_class__options',
                                'stockrecords',
                                'images',
-                               ).all()
?                                ------
+                               )
+ 
+     def browsable(self):
+         """
+         Excludes non-canonical products.
+         """
+         return self.filter(parent=None)
+ 
+ 
+ class ProductManager(models.Manager):
+     """
+     Uses ProductQuerySet and proxies its methods to allow chaining
+ 
+     Once Django 1.7 lands, this class can probably be removed:
+     https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager  # noqa
+     """
+ 
+     def get_queryset(self):
+         return ProductQuerySet(self.model, using=self._db)
+ 
+     def browsable(self):
+         return self.get_queryset().browsable()
+ 
+     def base_queryset(self):
+         return self.get_queryset().base_queryset()
  
  
  class BrowsableProductManager(ProductManager):
      """
      Excludes non-canonical products
+ 
+     Could be deprecated after Oscar 0.7 is released
      """
  
-     def get_query_set(self):
?                  -
+     def get_queryset(self):
-         return super(BrowsableProductManager, self).get_query_set().filter(
?                                                              -      ^^ -   -
+         return super(BrowsableProductManager, self).get_queryset().browsable()
?                                                                     ^^^^^^^  +
-             parent=None)
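The commit message's pointer to Django 1.7 is worth spelling out: Manager.from_queryset generates exactly the kind of proxying manager this record writes by hand. A minimal sketch of the post-1.7 replacement; browsable() is copied from the record, the rest is standard Django API:

from django.db import models


class ProductQuerySet(models.query.QuerySet):

    def browsable(self):
        # Excludes non-canonical products, as in the record.
        return self.filter(parent=None)


# Django >= 1.7 generates the proxy methods automatically, collapsing the
# hand-written ProductManager above into one line:
ProductManager = models.Manager.from_queryset(ProductQuerySet)

The generated manager proxies every public queryset method, so Product.objects.browsable() keeps working and stays chainable.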
507be5c8923b05304b223785cdba79ae7513f48a
openedx/stanford/djangoapps/register_cme/admin.py
openedx/stanford/djangoapps/register_cme/admin.py
from __future__ import unicode_literals

from django.contrib import admin

from .models import ExtraInfo


class ExtraInfoAdmin(admin.ModelAdmin):
    """ Admin interface for ExtraInfo model. """
    list_display = ('user', 'get_email', 'last_name', 'first_name',)
    search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)

    def get_email(self, obj):
        return obj.user.email
    get_email.short_description = 'Email address'

    class Meta(object):
        model = ExtraInfo

admin.site.register(ExtraInfo, ExtraInfoAdmin)

from __future__ import unicode_literals

from django.contrib import admin

from .models import ExtraInfo

admin.site.register(ExtraInfo)
Revert "Change `ExtraInfo` to user fields, add search"
Revert "Change `ExtraInfo` to user fields, add search" This reverts commit f5984fbd4187f4af65fb39b070f91870203d869b.
Python
agpl-3.0
caesar2164/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform
  from __future__ import unicode_literals
  
  from django.contrib import admin
  
  from .models import ExtraInfo
  
+ admin.site.register(ExtraInfo)
- class ExtraInfoAdmin(admin.ModelAdmin):
-     """ Admin interface for ExtraInfo model. """
-     list_display = ('user', 'get_email', 'last_name', 'first_name',)
-     search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
- 
-     def get_email(self, obj):
-         return obj.user.email
-     get_email.short_description = 'Email address'
- 
-     class Meta(object):
-         model = ExtraInfo
- 
- admin.site.register(ExtraInfo, ExtraInfoAdmin)
- 
Revert "Change `ExtraInfo` to user fields, add search"
## Code Before:
from __future__ import unicode_literals

from django.contrib import admin

from .models import ExtraInfo


class ExtraInfoAdmin(admin.ModelAdmin):
    """ Admin interface for ExtraInfo model. """
    list_display = ('user', 'get_email', 'last_name', 'first_name',)
    search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)

    def get_email(self, obj):
        return obj.user.email
    get_email.short_description = 'Email address'

    class Meta(object):
        model = ExtraInfo

admin.site.register(ExtraInfo, ExtraInfoAdmin)

## Instruction:
Revert "Change `ExtraInfo` to user fields, add search"

## Code After:
from __future__ import unicode_literals

from django.contrib import admin

from .models import ExtraInfo

admin.site.register(ExtraInfo)

  from __future__ import unicode_literals
  
  from django.contrib import admin
  
  from .models import ExtraInfo
  
- class ExtraInfoAdmin(admin.ModelAdmin):
-     """ Admin interface for ExtraInfo model. """
-     list_display = ('user', 'get_email', 'last_name', 'first_name',)
-     search_fields = ('user__username', 'user__email', 'last_name', 'first_name',)
- 
-     def get_email(self, obj):
-         return obj.user.email
-     get_email.short_description = 'Email address'
- 
-     class Meta(object):
-         model = ExtraInfo
- 
- admin.site.register(ExtraInfo, ExtraInfoAdmin)
?                               ----------------
+ admin.site.register(ExtraInfo)
7c75da48d6746fc148a79051338c3cd554d75615
accounts/views.py
accounts/views.py
from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings


def logout(request):
    """Logs out user redirects if in request"""
    r = request.GET.get('r', '')
    auth_logout(request)
    if r:
        return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
    else:
        return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)

from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings


def logout(request):
    """Logs out user redirects if in request"""
    next = request.GET.get('next', '')
    auth_logout(request)
    if next:
        return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
    else:
        return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
Change variable name to next for logout function
Change variable name to next for logout function
Python
agpl-3.0
openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms
  from django.shortcuts import redirect
  from django.contrib.auth import logout as auth_logout
  from django.conf import settings
  
  
  def logout(request):
      """Logs out user redirects if in request"""
-     r = request.GET.get('r', '')
+     next = request.GET.get('next', '')
      auth_logout(request)
-     if r:
+     if next:
-         return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
+         return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
      else:
          return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
Change variable name to next for logout function
## Code Before:
from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings


def logout(request):
    """Logs out user redirects if in request"""
    r = request.GET.get('r', '')
    auth_logout(request)
    if r:
        return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
    else:
        return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)

## Instruction:
Change variable name to next for logout function

## Code After:
from django.shortcuts import redirect
from django.contrib.auth import logout as auth_logout
from django.conf import settings


def logout(request):
    """Logs out user redirects if in request"""
    next = request.GET.get('next', '')
    auth_logout(request)
    if next:
        return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
    else:
        return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)

  from django.shortcuts import redirect
  from django.contrib.auth import logout as auth_logout
  from django.conf import settings
  
  
  def logout(request):
      """Logs out user redirects if in request"""
-     r = request.GET.get('r', '')
?     ^                    ^
+     next = request.GET.get('next', '')
?     ^^^^                    ^^^^
      auth_logout(request)
-     if r:
?        ^
+     if next:
?        ^^^^
-         return redirect('{}/?r={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, r))
?                              ^                                                   ^
+         return redirect('{}/?next={}'.format(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL, next))
?                              ^^^^                                                   ^^^^
      else:
          return redirect(settings.OPENSTAX_ACCOUNTS_LOGOUT_URL)
f6ddb5b76265d7597568d6169ed877e04c565f4a
games/managers.py
games/managers.py
from django.db.models import Manager


class ScreenshotManager(Manager):
    def published(self):
        return self.get_query_set().filter(published=True)

from django.db.models import Manager


class ScreenshotManager(Manager):
    def published(self):
        return self.get_query_set().filter(published=True).order_by('uploaded_at')
Order screenshots by ascending upload time in the front-end
Order screenshots by ascending upload time in the front-end

So that it's easy to order them intentionally. :)

... Until we come up with a better ordering solution, with weights or something.
Python
agpl-3.0
lutris/website,Turupawn/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website
  from django.db.models import Manager
  
  
  class ScreenshotManager(Manager):
      def published(self):
-         return self.get_query_set().filter(published=True)
+         return self.get_query_set().filter(published=True).order_by('uploaded_at')
Order screenshots by ascending upload time in the front-end
## Code Before:
from django.db.models import Manager


class ScreenshotManager(Manager):
    def published(self):
        return self.get_query_set().filter(published=True)

## Instruction:
Order screenshots by ascending upload time in the front-end

## Code After:
from django.db.models import Manager


class ScreenshotManager(Manager):
    def published(self):
        return self.get_query_set().filter(published=True).order_by('uploaded_at')

  from django.db.models import Manager
  
  
  class ScreenshotManager(Manager):
      def published(self):
-         return self.get_query_set().filter(published=True)
+         return self.get_query_set().filter(published=True).order_by('uploaded_at')
?                                                           ++++++++++++++++++++++++
d1e2aacb7926a7e751cd27eb562b2c5d86f7e1e8
opal/tests/test_core_test_runner.py
opal/tests/test_core_test_runner.py
import ffs
from mock import MagicMock, patch

from opal.core.test import OpalTestCase
from opal.core import test_runner


class RunPyTestsTestCase(OpalTestCase):

    @patch('subprocess.check_call')
    def test_run_tests(self, check_call):
        mock_args = MagicMock(name="args")
        mock_args.userland_here = ffs.Path('.')
        mock_args.coverage = False
        mock_args.test = None
        test_runner._run_py_tests(mock_args)
        check_call.assert_called_once_with(['python', 'runtests.py'])


class RunJSTestsTestCase(OpalTestCase):
    pass


class RunTestsTestCase(OpalTestCase):
    pass

import ffs
from mock import MagicMock, patch

from opal.core.test import OpalTestCase
from opal.core import test_runner


class RunPyTestsTestCase(OpalTestCase):

    @patch('subprocess.check_call')
    def test_run_tests(self, check_call):
        mock_args = MagicMock(name="args")
        mock_args.userland_here = ffs.Path('.')
        mock_args.coverage = False
        mock_args.test = None
        test_runner._run_py_tests(mock_args)
        check_call.assert_called_once_with(['python', 'runtests.py'])

    @patch('subprocess.check_call')
    def test_run_tests_with_test_arg(self, check_call):
        mock_args = MagicMock(name="args")
        mock_args.userland_here = ffs.Path('.')
        mock_args.coverage = False
        mock_args.test = 'opal.tests.foo'
        test_runner._run_py_tests(mock_args)
        check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])


class RunJSTestsTestCase(OpalTestCase):
    pass


class RunTestsTestCase(OpalTestCase):
    pass
Add test for opal test py -t
Add test for opal test py -t
Python
agpl-3.0
khchine5/opal,khchine5/opal,khchine5/opal
  import ffs
  from mock import MagicMock, patch
  
  from opal.core.test import OpalTestCase
  from opal.core import test_runner
  
  
  class RunPyTestsTestCase(OpalTestCase):
  
      @patch('subprocess.check_call')
      def test_run_tests(self, check_call):
          mock_args = MagicMock(name="args")
          mock_args.userland_here = ffs.Path('.')
          mock_args.coverage = False
          mock_args.test = None
          test_runner._run_py_tests(mock_args)
          check_call.assert_called_once_with(['python', 'runtests.py'])
  
+     @patch('subprocess.check_call')
+     def test_run_tests_with_test_arg(self, check_call):
+         mock_args = MagicMock(name="args")
+         mock_args.userland_here = ffs.Path('.')
+         mock_args.coverage = False
+         mock_args.test = 'opal.tests.foo'
+         test_runner._run_py_tests(mock_args)
+         check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
+ 
+ 
+ 
  class RunJSTestsTestCase(OpalTestCase):
      pass
  
  
  class RunTestsTestCase(OpalTestCase):
      pass
Add test for opal test py -t
## Code Before:
import ffs
from mock import MagicMock, patch

from opal.core.test import OpalTestCase
from opal.core import test_runner


class RunPyTestsTestCase(OpalTestCase):

    @patch('subprocess.check_call')
    def test_run_tests(self, check_call):
        mock_args = MagicMock(name="args")
        mock_args.userland_here = ffs.Path('.')
        mock_args.coverage = False
        mock_args.test = None
        test_runner._run_py_tests(mock_args)
        check_call.assert_called_once_with(['python', 'runtests.py'])


class RunJSTestsTestCase(OpalTestCase):
    pass


class RunTestsTestCase(OpalTestCase):
    pass

## Instruction:
Add test for opal test py -t

## Code After:
import ffs
from mock import MagicMock, patch

from opal.core.test import OpalTestCase
from opal.core import test_runner


class RunPyTestsTestCase(OpalTestCase):

    @patch('subprocess.check_call')
    def test_run_tests(self, check_call):
        mock_args = MagicMock(name="args")
        mock_args.userland_here = ffs.Path('.')
        mock_args.coverage = False
        mock_args.test = None
        test_runner._run_py_tests(mock_args)
        check_call.assert_called_once_with(['python', 'runtests.py'])

    @patch('subprocess.check_call')
    def test_run_tests_with_test_arg(self, check_call):
        mock_args = MagicMock(name="args")
        mock_args.userland_here = ffs.Path('.')
        mock_args.coverage = False
        mock_args.test = 'opal.tests.foo'
        test_runner._run_py_tests(mock_args)
        check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])


class RunJSTestsTestCase(OpalTestCase):
    pass


class RunTestsTestCase(OpalTestCase):
    pass

  import ffs
  from mock import MagicMock, patch
  
  from opal.core.test import OpalTestCase
  from opal.core import test_runner
  
  
  class RunPyTestsTestCase(OpalTestCase):
  
      @patch('subprocess.check_call')
      def test_run_tests(self, check_call):
          mock_args = MagicMock(name="args")
          mock_args.userland_here = ffs.Path('.')
          mock_args.coverage = False
          mock_args.test = None
          test_runner._run_py_tests(mock_args)
          check_call.assert_called_once_with(['python', 'runtests.py'])
  
+     @patch('subprocess.check_call')
+     def test_run_tests_with_test_arg(self, check_call):
+         mock_args = MagicMock(name="args")
+         mock_args.userland_here = ffs.Path('.')
+         mock_args.coverage = False
+         mock_args.test = 'opal.tests.foo'
+         test_runner._run_py_tests(mock_args)
+         check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
+ 
+ 
+ 
  class RunJSTestsTestCase(OpalTestCase):
      pass
  
  
  class RunTestsTestCase(OpalTestCase):
      pass
fe442d84140b0a588c6a8490b58a10995df58f17
tests/optimizers/test_constant_optimizer.py
tests/optimizers/test_constant_optimizer.py
"""Test suite for optimizers.constant.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import ast import pytest from pycc.asttools import parse from pycc.optimizers import constant source = """ ONE = 1 TWO = 2 THREE = ONE + TWO FOUR = THREE + ONE FIVE = THREE + TWO def return_const(): return FOUR def return_var(): return FIVE FIVE = FIVE + ONE FIVE -= ONE """ @pytest.fixture def node(): """Get as AST node from the source.""" return parse.parse(source) def test_constant_inliner(node): """Test that constant values are inlined.""" constant.ConstantOptimizer()(node) # Check assignment values using constants. assert node.body[2].value.n == 3 assert node.body[3].value.n == 4 assert node.body[4].value.n == 5 # Check return val of const function. assert isinstance(node.body[5].body[0].value, ast.Num) assert node.body[5].body[0].value.n == 4 # Check return val of var function. assert isinstance(node.body[6].body[0].value, ast.Name) assert node.body[6].body[0].value.id == 'FIVE'
"""Test suite for optimizers.constant.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import ast import pytest from pycc.asttools import parse from pycc.optimizers import constant source = """ ONE = 1 TWO = 2 THREE = ONE + TWO FOUR = THREE + ONE FIVE = THREE + TWO def return_const(): return FOUR def return_var(): return FIVE FIVE = FIVE + ONE FIVE -= ONE """ @pytest.fixture def node(): """Get as AST node from the source.""" return parse.parse(source) def test_constant_inliner(node): """Test that constant values are inlined.""" constant.optimize(node) # Check assignment values using constants. assert node.body[2].value.n == 3 assert node.body[3].value.n == 4 assert node.body[4].value.n == 5 # Check return val of const function. assert isinstance(node.body[5].body[0].value, ast.Num) assert node.body[5].body[0].value.n == 4 # Check return val of var function. assert isinstance(node.body[6].body[0].value, ast.Name) assert node.body[6].body[0].value.id == 'FIVE'
Fix test to use new optimizer interface
Fix test to use new optimizer interface

Signed-off-by: Kevin Conway <[email protected]>
Python
apache-2.0
kevinconway/pycc,kevinconway/pycc
"""Test suite for optimizers.constant.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import ast import pytest from pycc.asttools import parse from pycc.optimizers import constant source = """ ONE = 1 TWO = 2 THREE = ONE + TWO FOUR = THREE + ONE FIVE = THREE + TWO def return_const(): return FOUR def return_var(): return FIVE FIVE = FIVE + ONE FIVE -= ONE """ @pytest.fixture def node(): """Get as AST node from the source.""" return parse.parse(source) def test_constant_inliner(node): """Test that constant values are inlined.""" - constant.ConstantOptimizer()(node) + constant.optimize(node) # Check assignment values using constants. assert node.body[2].value.n == 3 assert node.body[3].value.n == 4 assert node.body[4].value.n == 5 # Check return val of const function. assert isinstance(node.body[5].body[0].value, ast.Num) assert node.body[5].body[0].value.n == 4 # Check return val of var function. assert isinstance(node.body[6].body[0].value, ast.Name) assert node.body[6].body[0].value.id == 'FIVE'
Fix test to use new optimizer interface
## Code Before:
"""Test suite for optimizers.constant."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

import ast

import pytest

from pycc.asttools import parse
from pycc.optimizers import constant

source = """
ONE = 1
TWO = 2
THREE = ONE + TWO
FOUR = THREE + ONE
FIVE = THREE + TWO

def return_const():
    return FOUR

def return_var():
    return FIVE

FIVE = FIVE + ONE
FIVE -= ONE
"""


@pytest.fixture
def node():
    """Get as AST node from the source."""
    return parse.parse(source)


def test_constant_inliner(node):
    """Test that constant values are inlined."""
    constant.ConstantOptimizer()(node)

    # Check assignment values using constants.
    assert node.body[2].value.n == 3
    assert node.body[3].value.n == 4
    assert node.body[4].value.n == 5

    # Check return val of const function.
    assert isinstance(node.body[5].body[0].value, ast.Num)
    assert node.body[5].body[0].value.n == 4

    # Check return val of var function.
    assert isinstance(node.body[6].body[0].value, ast.Name)
    assert node.body[6].body[0].value.id == 'FIVE'

## Instruction:
Fix test to use new optimizer interface

## Code After:
"""Test suite for optimizers.constant."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

import ast

import pytest

from pycc.asttools import parse
from pycc.optimizers import constant

source = """
ONE = 1
TWO = 2
THREE = ONE + TWO
FOUR = THREE + ONE
FIVE = THREE + TWO

def return_const():
    return FOUR

def return_var():
    return FIVE

FIVE = FIVE + ONE
FIVE -= ONE
"""


@pytest.fixture
def node():
    """Get as AST node from the source."""
    return parse.parse(source)


def test_constant_inliner(node):
    """Test that constant values are inlined."""
    constant.optimize(node)

    # Check assignment values using constants.
    assert node.body[2].value.n == 3
    assert node.body[3].value.n == 4
    assert node.body[4].value.n == 5

    # Check return val of const function.
    assert isinstance(node.body[5].body[0].value, ast.Num)
    assert node.body[5].body[0].value.n == 4

    # Check return val of var function.
    assert isinstance(node.body[6].body[0].value, ast.Name)
    assert node.body[6].body[0].value.id == 'FIVE'

  def test_constant_inliner(node):
      """Test that constant values are inlined."""
-     constant.ConstantOptimizer()(node)
?              - -------       ---
+     constant.optimize(node)
  
      # Check assignment values using constants.
      assert node.body[2].value.n == 3
      assert node.body[3].value.n == 4
      assert node.body[4].value.n == 5
  
      # Check return val of const function.
      assert isinstance(node.body[5].body[0].value, ast.Num)
      assert node.body[5].body[0].value.n == 4
  
      # Check return val of var function.
      assert isinstance(node.body[6].body[0].value, ast.Name)
      assert node.body[6].body[0].value.id == 'FIVE'
96cbe6cd5b1d86663fe44c7fb4351fdb9bf7b2eb
metafunctions/map.py
metafunctions/map.py
import typing as tp
import itertools

from metafunctions.concurrent import FunctionMerge
from metafunctions.operators import concat


class MergeMap(FunctionMerge):
    def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
        super().__init__(merge_function, (function, ))

    def _get_call_iterators(self, args):
        '''
        Each element in args is an iterable.
        '''
        args_iter = zip(*args)
        # Note that EVERY element in the func iter will be called, so we need to make sure the
        # length of our iterator is the same as the shortest iterable we received.
        shortest_arg = min(args, key=len)
        func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
        return args_iter, func_iter

    def _call_function(self, f, args:tuple, kwargs:dict):
        '''In MergeMap, args will be a single element tuple containing the args for this
        function.
        '''
        return f(*args[0], **kwargs)

    def __str__(self):
        return f'mmap({self.functions[0]!s})'

    def __repr__(self):
        return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'

import typing as tp
import itertools

from metafunctions.concurrent import FunctionMerge
from metafunctions.operators import concat


class MergeMap(FunctionMerge):
    def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
        '''
        MergeMap is a FunctionMerge with only one function. When called, it behaves like the
        builtin `map` function and calls its function once per item in the iterable(s) it receives.
        '''
        super().__init__(merge_function, (function, ))

    def _get_call_iterators(self, args):
        '''
        Each element in args is an iterable.
        '''
        args_iter = zip(*args)
        # Note that EVERY element in the func iter will be called, so we need to make sure the
        # length of our iterator is the same as the shortest iterable we received.
        shortest_arg = min(args, key=len)
        func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
        return args_iter, func_iter

    def _call_function(self, f, args:tuple, kwargs:dict):
        '''In MergeMap, args will be a single element tuple containing the args for this
        function.
        '''
        return f(*args[0], **kwargs)

    def __str__(self):
        return f'mmap({self.functions[0]!s})'

    def __repr__(self):
        return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'
Add a docstring for MergeMap
Add a docstring for MergeMap
Python
mit
ForeverWintr/metafunctions
  import typing as tp
  import itertools
  
  from metafunctions.concurrent import FunctionMerge
  from metafunctions.operators import concat
  
  
  class MergeMap(FunctionMerge):
      def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
+         '''
+         MergeMap is a FunctionMerge with only one function. When called, it behaves like the
+         builtin `map` function and calls its function once per item in the iterable(s) it receives.
+         '''
          super().__init__(merge_function, (function, ))
  
      def _get_call_iterators(self, args):
          '''
          Each element in args is an iterable.
          '''
          args_iter = zip(*args)
          # Note that EVERY element in the func iter will be called, so we need to make sure the
          # length of our iterator is the same as the shortest iterable we received.
          shortest_arg = min(args, key=len)
          func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
          return args_iter, func_iter
  
      def _call_function(self, f, args:tuple, kwargs:dict):
          '''In MergeMap, args will be a single element tuple containing the args for this
          function.
          '''
          return f(*args[0], **kwargs)
  
      def __str__(self):
          return f'mmap({self.functions[0]!s})'
  
      def __repr__(self):
          return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'
Add a docstring for MergeMap
## Code Before:
import typing as tp
import itertools

from metafunctions.concurrent import FunctionMerge
from metafunctions.operators import concat


class MergeMap(FunctionMerge):
    def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
        super().__init__(merge_function, (function, ))

    def _get_call_iterators(self, args):
        '''
        Each element in args is an iterable.
        '''
        args_iter = zip(*args)
        # Note that EVERY element in the func iter will be called, so we need to make sure the
        # length of our iterator is the same as the shortest iterable we received.
        shortest_arg = min(args, key=len)
        func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
        return args_iter, func_iter

    def _call_function(self, f, args:tuple, kwargs:dict):
        '''In MergeMap, args will be a single element tuple containing the args for this
        function.
        '''
        return f(*args[0], **kwargs)

    def __str__(self):
        return f'mmap({self.functions[0]!s})'

    def __repr__(self):
        return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'

## Instruction:
Add a docstring for MergeMap

## Code After:
import typing as tp
import itertools

from metafunctions.concurrent import FunctionMerge
from metafunctions.operators import concat


class MergeMap(FunctionMerge):
    def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
        '''
        MergeMap is a FunctionMerge with only one function. When called, it behaves like the
        builtin `map` function and calls its function once per item in the iterable(s) it receives.
        '''
        super().__init__(merge_function, (function, ))

    def _get_call_iterators(self, args):
        '''
        Each element in args is an iterable.
        '''
        args_iter = zip(*args)
        # Note that EVERY element in the func iter will be called, so we need to make sure the
        # length of our iterator is the same as the shortest iterable we received.
        shortest_arg = min(args, key=len)
        func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
        return args_iter, func_iter

    def _call_function(self, f, args:tuple, kwargs:dict):
        '''In MergeMap, args will be a single element tuple containing the args for this
        function.
        '''
        return f(*args[0], **kwargs)

    def __str__(self):
        return f'mmap({self.functions[0]!s})'

    def __repr__(self):
        return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'

  class MergeMap(FunctionMerge):
      def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
+         '''
+         MergeMap is a FunctionMerge with only one function. When called, it behaves like the
+         builtin `map` function and calls its function once per item in the iterable(s) it receives.
+         '''
          super().__init__(merge_function, (function, ))
d8c15667e76ce6d0dfa96a16312e75b83c63479b
tests/test_response.py
tests/test_response.py
from unittest.mock import patch

from flask import Flask

from flask_assistant import Assistant
from flask_assistant.response import _Response


@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
    mock = Assistant(Flask(__name__))
    resp = _Response('foobar')
    assert resp._response['speech'] == 'foobar'


@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
    mock = Assistant(Flask(__name__))
    resp = _Response(None)
    assert resp._response['speech'] is None


@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
    mock = Assistant(Flask(__name__))
    resp = _Response('foo & bar')
    assert resp._response['speech'] == 'foo &amp; bar'

from flask import Flask

from flask_assistant import Assistant
from flask_assistant.response import _Response
import pytest

patch = pytest.importorskip('unittest.mock.patch')


@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
    mock = Assistant(Flask(__name__))
    resp = _Response('foobar')
    assert resp._response['speech'] == 'foobar'


@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
    mock = Assistant(Flask(__name__))
    resp = _Response(None)
    assert resp._response['speech'] is None


@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
    mock = Assistant(Flask(__name__))
    resp = _Response('foo & bar')
    assert resp._response['speech'] == 'foo &amp; bar'
Disable test for py27 (mock not available)
Disable test for py27 (mock not available)
Python
apache-2.0
treethought/flask-assistant
- from unittest.mock import patch - from flask import Flask from flask_assistant import Assistant from flask_assistant.response import _Response + import pytest + + patch = pytest.importorskip('unittest.mock.patch') @patch('flask_assistant.response.current_app') def test_response_with_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response('foobar') assert resp._response['speech'] == 'foobar' @patch('flask_assistant.response.current_app') def test_response_with_None_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response(None) assert resp._response['speech'] is None @patch('flask_assistant.response.current_app') def test_response_speech_escaping(mock): mock = Assistant(Flask(__name__)) resp = _Response('foo & bar') assert resp._response['speech'] == 'foo &amp; bar'
Disable test for py27 (mock not available)
## Code Before: from unittest.mock import patch from flask import Flask from flask_assistant import Assistant from flask_assistant.response import _Response @patch('flask_assistant.response.current_app') def test_response_with_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response('foobar') assert resp._response['speech'] == 'foobar' @patch('flask_assistant.response.current_app') def test_response_with_None_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response(None) assert resp._response['speech'] is None @patch('flask_assistant.response.current_app') def test_response_speech_escaping(mock): mock = Assistant(Flask(__name__)) resp = _Response('foo & bar') assert resp._response['speech'] == 'foo &amp; bar' ## Instruction: Disable test for py27 (mock not available) ## Code After: from flask import Flask from flask_assistant import Assistant from flask_assistant.response import _Response import pytest patch = pytest.importorskip('unittest.mock.patch') @patch('flask_assistant.response.current_app') def test_response_with_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response('foobar') assert resp._response['speech'] == 'foobar' @patch('flask_assistant.response.current_app') def test_response_with_None_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response(None) assert resp._response['speech'] is None @patch('flask_assistant.response.current_app') def test_response_speech_escaping(mock): mock = Assistant(Flask(__name__)) resp = _Response('foo & bar') assert resp._response['speech'] == 'foo &amp; bar'
- from unittest.mock import patch - from flask import Flask from flask_assistant import Assistant from flask_assistant.response import _Response + import pytest + + patch = pytest.importorskip('unittest.mock.patch') @patch('flask_assistant.response.current_app') def test_response_with_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response('foobar') assert resp._response['speech'] == 'foobar' @patch('flask_assistant.response.current_app') def test_response_with_None_speech(mock): mock = Assistant(Flask(__name__)) resp = _Response(None) assert resp._response['speech'] is None @patch('flask_assistant.response.current_app') def test_response_speech_escaping(mock): mock = Assistant(Flask(__name__)) resp = _Response('foo & bar') assert resp._response['speech'] == 'foo &amp; bar'
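A note on the skip idiom in this commit: `pytest.importorskip` expects an importable module name and skips the current test module when that import fails. The sketch below shows the commonly used two-step variant, importing `unittest.mock` first and taking `patch` off the returned module; the test itself is a made-up usage example, not code from the repository.

```python
import pytest

# Skips everything in this file on interpreters lacking unittest.mock (py27).
mock = pytest.importorskip("unittest.mock")
patch = mock.patch

@patch("os.getcwd")
def test_patched_cwd(mock_getcwd):
    mock_getcwd.return_value = "/tmp"
    import os
    assert os.getcwd() == "/tmp"
```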
100a2ef97d499a87d3fae271f794de050f1c5686
opps/sitemaps/urls.py
opps/sitemaps/urls.py
from django.conf.urls import patterns, url from django.contrib.sitemaps import views as sitemap_views from opps.core.cache import cache_page from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct sitemaps = { 'articles': GenericSitemap(InfoDisct(), priority=0.6), } sitemaps_googlenews = { 'articles': GenericSitemap(InfoDisct(True), priority=0.6), } urlpatterns = patterns( '', url(r'^\.xml$', cache_page(86400)(sitemap_views.index), {'sitemaps': sitemaps}), url(r'^-googlenews\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps_googlenews, 'template_name': 'sitemap_googlenews.xml'}), url(r'^-(?P<section>.+)\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}), )
from django.conf.urls import patterns, url from django.contrib.sitemaps import views as sitemap_views from opps.core.cache import cache_page from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct sitemaps = { 'containers': GenericSitemap(InfoDisct(), priority=0.6), } sitemaps_googlenews = { 'containers': GenericSitemap(InfoDisct(True), priority=0.6), } urlpatterns = patterns( '', url(r'^\.xml$', cache_page(86400)(sitemap_views.index), {'sitemaps': sitemaps}), url(r'^-googlenews\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps_googlenews, 'template_name': 'sitemap_googlenews.xml'}), url(r'^-(?P<section>.+)\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps}), )
Add cache in sitemap section
Add cache in sitemap section
Python
mit
williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps
from django.conf.urls import patterns, url from django.contrib.sitemaps import views as sitemap_views from opps.core.cache import cache_page from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct sitemaps = { - 'articles': GenericSitemap(InfoDisct(), priority=0.6), + 'containers': GenericSitemap(InfoDisct(), priority=0.6), } sitemaps_googlenews = { - 'articles': GenericSitemap(InfoDisct(True), priority=0.6), + 'containers': GenericSitemap(InfoDisct(True), priority=0.6), } urlpatterns = patterns( '', url(r'^\.xml$', cache_page(86400)(sitemap_views.index), {'sitemaps': sitemaps}), url(r'^-googlenews\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps_googlenews, 'template_name': 'sitemap_googlenews.xml'}), - url(r'^-(?P<section>.+)\.xml$', sitemap_views.sitemap, + url(r'^-(?P<section>.+)\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps}), )
Add cache in sitemap section
## Code Before: from django.conf.urls import patterns, url from django.contrib.sitemaps import views as sitemap_views from opps.core.cache import cache_page from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct sitemaps = { 'articles': GenericSitemap(InfoDisct(), priority=0.6), } sitemaps_googlenews = { 'articles': GenericSitemap(InfoDisct(True), priority=0.6), } urlpatterns = patterns( '', url(r'^\.xml$', cache_page(86400)(sitemap_views.index), {'sitemaps': sitemaps}), url(r'^-googlenews\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps_googlenews, 'template_name': 'sitemap_googlenews.xml'}), url(r'^-(?P<section>.+)\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps}), ) ## Instruction: Add cache in sitemap section ## Code After: from django.conf.urls import patterns, url from django.contrib.sitemaps import views as sitemap_views from opps.core.cache import cache_page from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct sitemaps = { 'containers': GenericSitemap(InfoDisct(), priority=0.6), } sitemaps_googlenews = { 'containers': GenericSitemap(InfoDisct(True), priority=0.6), } urlpatterns = patterns( '', url(r'^\.xml$', cache_page(86400)(sitemap_views.index), {'sitemaps': sitemaps}), url(r'^-googlenews\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps_googlenews, 'template_name': 'sitemap_googlenews.xml'}), url(r'^-(?P<section>.+)\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps}), )
from django.conf.urls import patterns, url from django.contrib.sitemaps import views as sitemap_views from opps.core.cache import cache_page from opps.sitemaps.sitemaps import GenericSitemap, InfoDisct sitemaps = { - 'articles': GenericSitemap(InfoDisct(), priority=0.6), ? ----- + 'containers': GenericSitemap(InfoDisct(), priority=0.6), ? ++++ +++ } sitemaps_googlenews = { - 'articles': GenericSitemap(InfoDisct(True), priority=0.6), ? ----- + 'containers': GenericSitemap(InfoDisct(True), priority=0.6), ? ++++ +++ } urlpatterns = patterns( '', url(r'^\.xml$', cache_page(86400)(sitemap_views.index), {'sitemaps': sitemaps}), url(r'^-googlenews\.xml$', cache_page(86400)(sitemap_views.sitemap), {'sitemaps': sitemaps_googlenews, 'template_name': 'sitemap_googlenews.xml'}), - url(r'^-(?P<section>.+)\.xml$', sitemap_views.sitemap, + url(r'^-(?P<section>.+)\.xml$', cache_page(86400)(sitemap_views.sitemap), ? ++++++++++++++++++ + {'sitemaps': sitemaps}), )
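For readers outside the opps codebase: the same per-URL caching can be written with Django's stock decorator, which `opps.core.cache.cache_page` presumably wraps. A minimal sketch, with an empty `sitemaps` dict standing in for real `Sitemap` classes and the old-style `patterns` import matching the Django era this record targets:

```python
from django.conf.urls import patterns, url
from django.contrib.sitemaps import views as sitemap_views
from django.views.decorators.cache import cache_page

sitemaps = {}  # fill in with Sitemap subclasses

urlpatterns = patterns(
    '',
    # Cache the rendered XML for 86400 seconds (one day) per URL.
    url(r'^sitemap\.xml$', cache_page(86400)(sitemap_views.sitemap),
        {'sitemaps': sitemaps}),
)
```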
3d281a3524a4cd1bf9e2f60767ec8d89d3b589e2
tools/wcloud/wcloud/deploymentsettings.py
tools/wcloud/wcloud/deploymentsettings.py
from weblab.admin.script import Creation APACHE_CONF_NAME = 'apache.conf' MIN_PORT = 10000 DEFAULT_DEPLOYMENT_SETTINGS = { Creation.COORD_ENGINE: 'redis', Creation.COORD_REDIS_DB: 0, Creation.COORD_REDIS_PORT: 6379, Creation.DB_ENGINE: 'mysql', Creation.ADMIN_USER: 'CHANGE_ME', # --admin-user=admin Creation.ADMIN_NAME: 'CHANGE_ME', # --admin-name=(lo que diga) Creation.ADMIN_PASSWORD: 'CHANGE_ME', # --admin-password=(lo que diga) Creation.ADMIN_MAIL: 'CHANGE_ME', # --admin-mail=(lo que diga) Creation.START_PORTS: 'CHANGE_ME', # --start-port=10000 Creation.SYSTEM_IDENTIFIER: 'CHANGE_ME', # -i (nombre de la uni, puede tener espacios) Creation.SERVER_HOST: 'weblab.deusto.es', # --server-host=(de settings) Creation.ENTITY_LINK: 'http://www.deusto.es/', # --entity-link= http://www.deusto.es/ Creation.CORES: 3, }
from weblab.admin.script import Creation APACHE_CONF_NAME = 'apache.conf' MIN_PORT = 10000 DEFAULT_DEPLOYMENT_SETTINGS = { Creation.COORD_ENGINE: 'redis', Creation.COORD_REDIS_DB: 0, Creation.COORD_REDIS_PORT: 6379, Creation.DB_ENGINE: 'mysql', Creation.ADMIN_USER: 'CHANGE_ME', # --admin-user=admin Creation.ADMIN_NAME: 'CHANGE_ME', # --admin-name=(lo que diga) Creation.ADMIN_PASSWORD: 'CHANGE_ME', # --admin-password=(lo que diga) Creation.ADMIN_MAIL: 'CHANGE_ME', # --admin-mail=(lo que diga) Creation.START_PORTS: 'CHANGE_ME', # --start-port=10000 Creation.SYSTEM_IDENTIFIER: 'CHANGE_ME', # -i (nombre de la uni, puede tener espacios) Creation.SERVER_HOST: 'weblab.deusto.es', # --server-host=(de settings) Creation.ENTITY_LINK: 'http://www.deusto.es/', # --entity-link= http://www.deusto.es/ Creation.CORES: 3, Creation.ADD_FEDERATED_LOGIC : True, Creation.ADD_FEDERATED_VISIR : True, Creation.ADD_FEDERATED_SUBMARINE : True, }
Add logic, visir and submarine to wcloud
Add logic, visir and submarine to wcloud
Python
bsd-2-clause
morelab/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto
from weblab.admin.script import Creation APACHE_CONF_NAME = 'apache.conf' MIN_PORT = 10000 DEFAULT_DEPLOYMENT_SETTINGS = { Creation.COORD_ENGINE: 'redis', Creation.COORD_REDIS_DB: 0, Creation.COORD_REDIS_PORT: 6379, Creation.DB_ENGINE: 'mysql', Creation.ADMIN_USER: 'CHANGE_ME', # --admin-user=admin Creation.ADMIN_NAME: 'CHANGE_ME', # --admin-name=(lo que diga) Creation.ADMIN_PASSWORD: 'CHANGE_ME', # --admin-password=(lo que diga) Creation.ADMIN_MAIL: 'CHANGE_ME', # --admin-mail=(lo que diga) Creation.START_PORTS: 'CHANGE_ME', # --start-port=10000 Creation.SYSTEM_IDENTIFIER: 'CHANGE_ME', # -i (nombre de la uni, puede tener espacios) Creation.SERVER_HOST: 'weblab.deusto.es', # --server-host=(de settings) Creation.ENTITY_LINK: 'http://www.deusto.es/', # --entity-link= http://www.deusto.es/ Creation.CORES: 3, - + Creation.ADD_FEDERATED_LOGIC : True, + Creation.ADD_FEDERATED_VISIR : True, + Creation.ADD_FEDERATED_SUBMARINE : True, }
Add logic, visir and submarine to wcloud
## Code Before: from weblab.admin.script import Creation APACHE_CONF_NAME = 'apache.conf' MIN_PORT = 10000 DEFAULT_DEPLOYMENT_SETTINGS = { Creation.COORD_ENGINE: 'redis', Creation.COORD_REDIS_DB: 0, Creation.COORD_REDIS_PORT: 6379, Creation.DB_ENGINE: 'mysql', Creation.ADMIN_USER: 'CHANGE_ME', # --admin-user=admin Creation.ADMIN_NAME: 'CHANGE_ME', # --admin-name=(lo que diga) Creation.ADMIN_PASSWORD: 'CHANGE_ME', # --admin-password=(lo que diga) Creation.ADMIN_MAIL: 'CHANGE_ME', # --admin-mail=(lo que diga) Creation.START_PORTS: 'CHANGE_ME', # --start-port=10000 Creation.SYSTEM_IDENTIFIER: 'CHANGE_ME', # -i (nombre de la uni, puede tener espacios) Creation.SERVER_HOST: 'weblab.deusto.es', # --server-host=(de settings) Creation.ENTITY_LINK: 'http://www.deusto.es/', # --entity-link= http://www.deusto.es/ Creation.CORES: 3, } ## Instruction: Add logic, visir and submarine to wcloud ## Code After: from weblab.admin.script import Creation APACHE_CONF_NAME = 'apache.conf' MIN_PORT = 10000 DEFAULT_DEPLOYMENT_SETTINGS = { Creation.COORD_ENGINE: 'redis', Creation.COORD_REDIS_DB: 0, Creation.COORD_REDIS_PORT: 6379, Creation.DB_ENGINE: 'mysql', Creation.ADMIN_USER: 'CHANGE_ME', # --admin-user=admin Creation.ADMIN_NAME: 'CHANGE_ME', # --admin-name=(lo que diga) Creation.ADMIN_PASSWORD: 'CHANGE_ME', # --admin-password=(lo que diga) Creation.ADMIN_MAIL: 'CHANGE_ME', # --admin-mail=(lo que diga) Creation.START_PORTS: 'CHANGE_ME', # --start-port=10000 Creation.SYSTEM_IDENTIFIER: 'CHANGE_ME', # -i (nombre de la uni, puede tener espacios) Creation.SERVER_HOST: 'weblab.deusto.es', # --server-host=(de settings) Creation.ENTITY_LINK: 'http://www.deusto.es/', # --entity-link= http://www.deusto.es/ Creation.CORES: 3, Creation.ADD_FEDERATED_LOGIC : True, Creation.ADD_FEDERATED_VISIR : True, Creation.ADD_FEDERATED_SUBMARINE : True, }
from weblab.admin.script import Creation APACHE_CONF_NAME = 'apache.conf' MIN_PORT = 10000 DEFAULT_DEPLOYMENT_SETTINGS = { Creation.COORD_ENGINE: 'redis', Creation.COORD_REDIS_DB: 0, Creation.COORD_REDIS_PORT: 6379, Creation.DB_ENGINE: 'mysql', Creation.ADMIN_USER: 'CHANGE_ME', # --admin-user=admin Creation.ADMIN_NAME: 'CHANGE_ME', # --admin-name=(lo que diga) Creation.ADMIN_PASSWORD: 'CHANGE_ME', # --admin-password=(lo que diga) Creation.ADMIN_MAIL: 'CHANGE_ME', # --admin-mail=(lo que diga) Creation.START_PORTS: 'CHANGE_ME', # --start-port=10000 Creation.SYSTEM_IDENTIFIER: 'CHANGE_ME', # -i (nombre de la uni, puede tener espacios) Creation.SERVER_HOST: 'weblab.deusto.es', # --server-host=(de settings) Creation.ENTITY_LINK: 'http://www.deusto.es/', # --entity-link= http://www.deusto.es/ Creation.CORES: 3, - + Creation.ADD_FEDERATED_LOGIC : True, + Creation.ADD_FEDERATED_VISIR : True, + Creation.ADD_FEDERATED_SUBMARINE : True, }
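Every `'CHANGE_ME'` above is a per-deployment placeholder, so a natural consumer overlays real values on the defaults. A small sketch of that overlay; `build_settings` and the override values are hypothetical, and only `DEFAULT_DEPLOYMENT_SETTINGS` and the `Creation` keys come from the record:

```python
def build_settings(defaults, **overrides):
    # Copy first so the module-level defaults stay untouched.
    settings = dict(defaults)
    settings.update(overrides)
    return settings

# Hypothetical per-tenant values; any key left as 'CHANGE_ME' is a bug.
site = build_settings(
    DEFAULT_DEPLOYMENT_SETTINGS,
    **{Creation.ADMIN_USER: 'admin', Creation.START_PORTS: 10000}
)
assert site[Creation.ADMIN_USER] != 'CHANGE_ME'
```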
b612d7a6d67e999f96917de642230946ccf02106
qnd/experiment.py
qnd/experiment.py
import tensorflow as tf from .flag import FLAGS, FlagAdder from .estimator import def_estimator from .inputs import def_def_train_input_fn, def_def_eval_input_fn def def_def_experiment_fn(batch_inputs=True, prepare_filename_queues=True, distributed=False): adder = FlagAdder() for mode in [tf.contrib.learn.ModeKeys.TRAIN, tf.contrib.learn.ModeKeys.EVAL]: adder.add_flag("{}_steps".format(mode), type=int, help="Maximum number of {} steps".format(mode)) adder.add_flag("min_eval_frequency", type=int, default=1, help="Minimum evaluation frequency in number of model " "savings") estimator = def_estimator(distributed) def_train_input_fn = def_def_train_input_fn(batch_inputs, prepare_filename_queues) def_eval_input_fn = def_def_eval_input_fn(batch_inputs, prepare_filename_queues) def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None): def experiment_fn(output_dir): return tf.contrib.learn.Experiment( estimator(model_fn, output_dir), def_train_input_fn(train_input_fn), def_eval_input_fn(eval_input_fn or train_input_fn), **adder.flags) return experiment_fn return def_experiment_fn
import tensorflow as tf from .flag import FLAGS, FlagAdder from .estimator import def_estimator from .inputs import def_def_train_input_fn, def_def_eval_input_fn def def_def_experiment_fn(batch_inputs=True, prepare_filename_queues=True, distributed=False): adder = FlagAdder() for mode in [tf.contrib.learn.ModeKeys.TRAIN, tf.contrib.learn.ModeKeys.EVAL]: adder.add_flag("{}_steps".format(mode), type=int, help="Maximum number of {} steps".format(mode)) adder.add_flag("min_eval_frequency", type=int, default=1, help="Minimum evaluation frequency in number of train steps") estimator = def_estimator(distributed) def_train_input_fn = def_def_train_input_fn(batch_inputs, prepare_filename_queues) def_eval_input_fn = def_def_eval_input_fn(batch_inputs, prepare_filename_queues) def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None): def experiment_fn(output_dir): return tf.contrib.learn.Experiment( estimator(model_fn, output_dir), def_train_input_fn(train_input_fn), def_eval_input_fn(eval_input_fn or train_input_fn), **adder.flags) return experiment_fn return def_experiment_fn
Fix help message of --min_eval_frequency flag
Fix help message of --min_eval_frequency flag
Python
unlicense
raviqqe/tensorflow-qnd
import tensorflow as tf from .flag import FLAGS, FlagAdder from .estimator import def_estimator from .inputs import def_def_train_input_fn, def_def_eval_input_fn def def_def_experiment_fn(batch_inputs=True, prepare_filename_queues=True, distributed=False): adder = FlagAdder() for mode in [tf.contrib.learn.ModeKeys.TRAIN, tf.contrib.learn.ModeKeys.EVAL]: adder.add_flag("{}_steps".format(mode), type=int, help="Maximum number of {} steps".format(mode)) adder.add_flag("min_eval_frequency", type=int, default=1, - help="Minimum evaluation frequency in number of model " + help="Minimum evaluation frequency in number of train steps") - "savings") estimator = def_estimator(distributed) def_train_input_fn = def_def_train_input_fn(batch_inputs, prepare_filename_queues) def_eval_input_fn = def_def_eval_input_fn(batch_inputs, prepare_filename_queues) def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None): def experiment_fn(output_dir): return tf.contrib.learn.Experiment( estimator(model_fn, output_dir), def_train_input_fn(train_input_fn), def_eval_input_fn(eval_input_fn or train_input_fn), **adder.flags) return experiment_fn return def_experiment_fn
Fix help message of --min_eval_frequency flag
## Code Before: import tensorflow as tf from .flag import FLAGS, FlagAdder from .estimator import def_estimator from .inputs import def_def_train_input_fn, def_def_eval_input_fn def def_def_experiment_fn(batch_inputs=True, prepare_filename_queues=True, distributed=False): adder = FlagAdder() for mode in [tf.contrib.learn.ModeKeys.TRAIN, tf.contrib.learn.ModeKeys.EVAL]: adder.add_flag("{}_steps".format(mode), type=int, help="Maximum number of {} steps".format(mode)) adder.add_flag("min_eval_frequency", type=int, default=1, help="Minimum evaluation frequency in number of model " "savings") estimator = def_estimator(distributed) def_train_input_fn = def_def_train_input_fn(batch_inputs, prepare_filename_queues) def_eval_input_fn = def_def_eval_input_fn(batch_inputs, prepare_filename_queues) def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None): def experiment_fn(output_dir): return tf.contrib.learn.Experiment( estimator(model_fn, output_dir), def_train_input_fn(train_input_fn), def_eval_input_fn(eval_input_fn or train_input_fn), **adder.flags) return experiment_fn return def_experiment_fn ## Instruction: Fix help message of --min_eval_frequency flag ## Code After: import tensorflow as tf from .flag import FLAGS, FlagAdder from .estimator import def_estimator from .inputs import def_def_train_input_fn, def_def_eval_input_fn def def_def_experiment_fn(batch_inputs=True, prepare_filename_queues=True, distributed=False): adder = FlagAdder() for mode in [tf.contrib.learn.ModeKeys.TRAIN, tf.contrib.learn.ModeKeys.EVAL]: adder.add_flag("{}_steps".format(mode), type=int, help="Maximum number of {} steps".format(mode)) adder.add_flag("min_eval_frequency", type=int, default=1, help="Minimum evaluation frequency in number of train steps") estimator = def_estimator(distributed) def_train_input_fn = def_def_train_input_fn(batch_inputs, prepare_filename_queues) def_eval_input_fn = def_def_eval_input_fn(batch_inputs, prepare_filename_queues) def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None): def experiment_fn(output_dir): return tf.contrib.learn.Experiment( estimator(model_fn, output_dir), def_train_input_fn(train_input_fn), def_eval_input_fn(eval_input_fn or train_input_fn), **adder.flags) return experiment_fn return def_experiment_fn
import tensorflow as tf from .flag import FLAGS, FlagAdder from .estimator import def_estimator from .inputs import def_def_train_input_fn, def_def_eval_input_fn def def_def_experiment_fn(batch_inputs=True, prepare_filename_queues=True, distributed=False): adder = FlagAdder() for mode in [tf.contrib.learn.ModeKeys.TRAIN, tf.contrib.learn.ModeKeys.EVAL]: adder.add_flag("{}_steps".format(mode), type=int, help="Maximum number of {} steps".format(mode)) adder.add_flag("min_eval_frequency", type=int, default=1, - help="Minimum evaluation frequency in number of model " ? ^^^ ^^ + help="Minimum evaluation frequency in number of train steps") ? ^^^^^^^^ ^^ + - "savings") estimator = def_estimator(distributed) def_train_input_fn = def_def_train_input_fn(batch_inputs, prepare_filename_queues) def_eval_input_fn = def_def_eval_input_fn(batch_inputs, prepare_filename_queues) def def_experiment_fn(model_fn, train_input_fn, eval_input_fn=None): def experiment_fn(output_dir): return tf.contrib.learn.Experiment( estimator(model_fn, output_dir), def_train_input_fn(train_input_fn), def_eval_input_fn(eval_input_fn or train_input_fn), **adder.flags) return experiment_fn return def_experiment_fn
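`FlagAdder` ultimately registers command-line flags, so the corrected help string ends up attached to a `--min_eval_frequency` option. For readers unfamiliar with qnd, here is the plain-argparse equivalent of the flag surface above (a sketch, not qnd's actual implementation):

```python
import argparse

parser = argparse.ArgumentParser()
for mode in ("train", "eval"):
    parser.add_argument("--{}_steps".format(mode), type=int,
                        help="Maximum number of {} steps".format(mode))
parser.add_argument("--min_eval_frequency", type=int, default=1,
                    help="Minimum evaluation frequency in number of train steps")

args = parser.parse_args(["--train_steps", "1000"])
print(args.train_steps, args.min_eval_frequency)  # 1000 1
```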
6b819174557a1dffbcb397dc1d6e2a3f7e01a12b
milestones/migrations/0002_data__seed_relationship_types.py
milestones/migrations/0002_data__seed_relationship_types.py
from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.using(db_alias).get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.using(db_alias).filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
Remove uses of using() from migrations
Remove uses of using() from migrations This hardcoded the db_alias fetched from schema_editor and forced Django to try to migrate any second database you use, rather than routing to the default database. In testing a build from scratch, these calls do not appear to be needed. Using using() prevents us from using multiple databases behind edxapp.
Python
agpl-3.0
edx/edx-milestones
from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") - db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): - MilestoneRelationshipType.objects.using(db_alias).get_or_create( + MilestoneRelationshipType.objects.get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") - db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): - MilestoneRelationshipType.objects.using(db_alias).filter(name=name).delete() + MilestoneRelationshipType.objects.filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
Remove uses of using() from migrations
## Code Before: from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.using(db_alias).get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.using(db_alias).filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ] ## Instruction: Remove uses of using() from migrations ## Code After: from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") for name in fetch_milestone_relationship_types().values(): MilestoneRelationshipType.objects.filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
from __future__ import unicode_literals from django.db import migrations, models from milestones.data import fetch_milestone_relationship_types def seed_relationship_types(apps, schema_editor): """Seed the relationship types.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") - db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): - MilestoneRelationshipType.objects.using(db_alias).get_or_create( ? ---------------- + MilestoneRelationshipType.objects.get_or_create( name=name, description='Autogenerated milestone relationship type "{}"'.format(name), ) def delete_relationship_types(apps, schema_editor): """Clean up any relationships we made.""" MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType") - db_alias = schema_editor.connection.alias for name in fetch_milestone_relationship_types().values(): - MilestoneRelationshipType.objects.using(db_alias).filter(name=name).delete() ? ---------------- + MilestoneRelationshipType.objects.filter(name=name).delete() class Migration(migrations.Migration): dependencies = [ ('milestones', '0001_initial'), ] operations = [ migrations.RunPython(seed_relationship_types, delete_relationship_types), ]
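For reference, the router-friendly shape of a `RunPython` data migration: no `using()`, so reads and writes go wherever Django's database router sends them. App and model names below are placeholders, not edx code:

```python
from django.db import migrations

def forwards(apps, schema_editor):
    # Fetch the historical model; never import it directly in a migration.
    Thing = apps.get_model("myapp", "Thing")
    Thing.objects.get_or_create(name="seed")

def backwards(apps, schema_editor):
    Thing = apps.get_model("myapp", "Thing")
    Thing.objects.filter(name="seed").delete()

class Migration(migrations.Migration):
    dependencies = [("myapp", "0001_initial")]
    operations = [migrations.RunPython(forwards, backwards)]
```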
4a24e19c160535c7a65c7f3f11748e6048386038
examples/gst/wavenc.py
examples/gst/wavenc.py
import sys import gst def decode(filename): output = filename + '.wav' pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' 'filesink location="%s"') % (filename, output) bin = gst.parse_launch(pipeline) bin.set_state(gst.STATE_PLAYING) while bin.iterate(): pass bin.set_state(gst.STATE_NULL) def main(args): for arg in args[1:]: decode(arg) if __name__ == '__main__': sys.exit(main(sys.argv))
import sys import gst def decode(filename): output = filename + '.wav' pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' 'filesink location="%s" }') % (filename, output) bin = gst.parse_launch(pipeline) bin.set_state(gst.STATE_PLAYING) bin.connect('eos', lambda bin: gst.main_quit()) gst.main() def main(args): for arg in args[1:]: decode(arg) if __name__ == '__main__': sys.exit(main(sys.argv))
Put it in a thread and run it in a mainloop
Put it in a thread and run it in a mainloop Original commit message from CVS: Put it in a thread and run it in a mainloop
Python
lgpl-2.1
lubosz/gst-python,freedesktop-unofficial-mirror/gstreamer__gst-python,alessandrod/gst-python,GStreamer/gst-python,pexip/gst-python,freedesktop-unofficial-mirror/gstreamer-sdk__gst-python
import sys import gst def decode(filename): output = filename + '.wav' - pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' + pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' - 'filesink location="%s"') % (filename, output) + 'filesink location="%s" }') % (filename, output) bin = gst.parse_launch(pipeline) bin.set_state(gst.STATE_PLAYING) + bin.connect('eos', lambda bin: gst.main_quit()) + gst.main() - while bin.iterate(): - pass - bin.set_state(gst.STATE_NULL) def main(args): for arg in args[1:]: decode(arg) if __name__ == '__main__': sys.exit(main(sys.argv))
Put it in a thread and run it in a mainloop
## Code Before: import sys import gst def decode(filename): output = filename + '.wav' pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' 'filesink location="%s"') % (filename, output) bin = gst.parse_launch(pipeline) bin.set_state(gst.STATE_PLAYING) while bin.iterate(): pass bin.set_state(gst.STATE_NULL) def main(args): for arg in args[1:]: decode(arg) if __name__ == '__main__': sys.exit(main(sys.argv)) ## Instruction: Put it in a thread and run it in a mainloop ## Code After: import sys import gst def decode(filename): output = filename + '.wav' pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' 'filesink location="%s" }') % (filename, output) bin = gst.parse_launch(pipeline) bin.set_state(gst.STATE_PLAYING) bin.connect('eos', lambda bin: gst.main_quit()) gst.main() def main(args): for arg in args[1:]: decode(arg) if __name__ == '__main__': sys.exit(main(sys.argv))
import sys import gst def decode(filename): output = filename + '.wav' - pipeline = ('filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' + pipeline = ('{ filesrc location="%s" ! spider ! audio/x-raw-int,rate=44100,stereo=2 ! wavenc ! ' ? ++ - 'filesink location="%s"') % (filename, output) + 'filesink location="%s" }') % (filename, output) ? ++ bin = gst.parse_launch(pipeline) bin.set_state(gst.STATE_PLAYING) + bin.connect('eos', lambda bin: gst.main_quit()) + gst.main() - while bin.iterate(): - pass - bin.set_state(gst.STATE_NULL) def main(args): for arg in args[1:]: decode(arg) if __name__ == '__main__': sys.exit(main(sys.argv))
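The diff above replaces busy polling (`while bin.iterate()`) with a signal handler that quits the main loop at end-of-stream. Stripped of GStreamer specifics, the callback pattern looks like this; `Pipeline` is a toy stand-in for the real bin object, not any gst API:

```python
class Pipeline(object):
    """Toy stand-in for a gst bin: callbacks instead of polling."""
    def __init__(self):
        self._handlers = {}
    def connect(self, signal, callback):
        self._handlers.setdefault(signal, []).append(callback)
    def emit(self, signal):
        for callback in self._handlers.get(signal, []):
            callback(self)

finished = []
pipe = Pipeline()
# Register teardown once, then hand control to the main loop; in the
# real script gst.main_quit() plays the role of finished.append.
pipe.connect('eos', lambda p: finished.append(p))
pipe.emit('eos')   # the streaming machinery fires this when playback ends
assert finished == [pipe]
```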
9bf70db96d8ae5204b20e1e214cb92e195ab5928
changes/api/build_flaky_tests.py
changes/api/build_flaky_tests.py
from __future__ import absolute_import from changes.api.base import APIView from changes.config import db from changes.constants import Result from changes.models import Build, Job, TestCase class BuildFlakyTestsAPIView(APIView): def get(self, build_id): build = Build.query.get(build_id) if build is None: return '', 404 jobs = list(Job.query.filter( Job.build_id == build.id, )) flaky_tests_query = db.session.query( TestCase.name ).filter( TestCase.job_id.in_([j.id for j in jobs]), TestCase.result == Result.passed, TestCase.reruns > 1 ).order_by(TestCase.name.asc()) flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query) context = { 'repositoryUrl': build.project.repository.url, 'flakyTests': { 'count': len(flaky_tests), 'items': flaky_tests } } return self.respond(context)
from __future__ import absolute_import from changes.api.base import APIView from changes.config import db from changes.constants import Result from changes.models import Build, Job, TestCase class BuildFlakyTestsAPIView(APIView): def get(self, build_id): build = Build.query.get(build_id) if build is None: return '', 404 jobs = list(Job.query.filter( Job.build_id == build.id, )) flaky_tests_query = db.session.query( TestCase.name ).filter( TestCase.job_id.in_([j.id for j in jobs]), TestCase.result == Result.passed, TestCase.reruns > 1 ).order_by(TestCase.name.asc()) flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query) context = { 'projectSlug': build.project.slug, 'repositoryUrl': build.project.repository.url, 'flakyTests': { 'count': len(flaky_tests), 'items': flaky_tests } } return self.respond(context)
Add projectSlug to build flaky tests API response
Add projectSlug to build flaky tests API response Summary: We will use it in the test quarantine service to whitelist projects which support quarantine. Test Plan: Tested locally. { "projectSlug": "changesjenkins", "repositoryUrl": "https://github.com/dropbox/changes.git", "flakyTests": { "count": 1, "items": [ { "name": "tests.account.test_account.AccountTest.test_account_change_language" } ] } } Reviewers: haoyi Reviewed By: haoyi Subscribers: changesbot, mkedia Differential Revision: https://tails.corp.dropbox.com/D123809
Python
apache-2.0
dropbox/changes,wfxiang08/changes,bowlofstew/changes
from __future__ import absolute_import from changes.api.base import APIView from changes.config import db from changes.constants import Result from changes.models import Build, Job, TestCase class BuildFlakyTestsAPIView(APIView): def get(self, build_id): build = Build.query.get(build_id) if build is None: return '', 404 jobs = list(Job.query.filter( Job.build_id == build.id, )) flaky_tests_query = db.session.query( TestCase.name ).filter( TestCase.job_id.in_([j.id for j in jobs]), TestCase.result == Result.passed, TestCase.reruns > 1 ).order_by(TestCase.name.asc()) flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query) context = { + 'projectSlug': build.project.slug, 'repositoryUrl': build.project.repository.url, 'flakyTests': { 'count': len(flaky_tests), 'items': flaky_tests } } return self.respond(context)
Add projectSlug to build flaky tests API response
## Code Before: from __future__ import absolute_import from changes.api.base import APIView from changes.config import db from changes.constants import Result from changes.models import Build, Job, TestCase class BuildFlakyTestsAPIView(APIView): def get(self, build_id): build = Build.query.get(build_id) if build is None: return '', 404 jobs = list(Job.query.filter( Job.build_id == build.id, )) flaky_tests_query = db.session.query( TestCase.name ).filter( TestCase.job_id.in_([j.id for j in jobs]), TestCase.result == Result.passed, TestCase.reruns > 1 ).order_by(TestCase.name.asc()) flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query) context = { 'repositoryUrl': build.project.repository.url, 'flakyTests': { 'count': len(flaky_tests), 'items': flaky_tests } } return self.respond(context) ## Instruction: Add projectSlug to build flaky tests API response ## Code After: from __future__ import absolute_import from changes.api.base import APIView from changes.config import db from changes.constants import Result from changes.models import Build, Job, TestCase class BuildFlakyTestsAPIView(APIView): def get(self, build_id): build = Build.query.get(build_id) if build is None: return '', 404 jobs = list(Job.query.filter( Job.build_id == build.id, )) flaky_tests_query = db.session.query( TestCase.name ).filter( TestCase.job_id.in_([j.id for j in jobs]), TestCase.result == Result.passed, TestCase.reruns > 1 ).order_by(TestCase.name.asc()) flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query) context = { 'projectSlug': build.project.slug, 'repositoryUrl': build.project.repository.url, 'flakyTests': { 'count': len(flaky_tests), 'items': flaky_tests } } return self.respond(context)
from __future__ import absolute_import from changes.api.base import APIView from changes.config import db from changes.constants import Result from changes.models import Build, Job, TestCase class BuildFlakyTestsAPIView(APIView): def get(self, build_id): build = Build.query.get(build_id) if build is None: return '', 404 jobs = list(Job.query.filter( Job.build_id == build.id, )) flaky_tests_query = db.session.query( TestCase.name ).filter( TestCase.job_id.in_([j.id for j in jobs]), TestCase.result == Result.passed, TestCase.reruns > 1 ).order_by(TestCase.name.asc()) flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query) context = { + 'projectSlug': build.project.slug, 'repositoryUrl': build.project.repository.url, 'flakyTests': { 'count': len(flaky_tests), 'items': flaky_tests } } return self.respond(context)
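A self-contained sketch of the query-and-serialize pattern in this view, using an in-memory SQLite database and a stripped-down stand-in for `changes.models.TestCase`. SQLAlchemy 1.4+ imports are assumed; only the `in_()`/`reruns` filters and the count/items payload shape come from the record:

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class TestCase(Base):
    __tablename__ = 'testcase'
    id = Column(Integer, primary_key=True)
    job_id = Column(Integer)
    name = Column(String)
    reruns = Column(Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([TestCase(job_id=1, name='t_flaky', reruns=2),
                 TestCase(job_id=2, name='t_stable', reruns=0)])

rows = (session.query(TestCase.name)
        .filter(TestCase.job_id.in_([1]), TestCase.reruns > 1)
        .order_by(TestCase.name.asc()))
flaky = [{'name': r.name} for r in rows]
context = {'projectSlug': 'demo',
           'flakyTests': {'count': len(flaky), 'items': flaky}}
assert context['flakyTests'] == {'count': 1, 'items': [{'name': 't_flaky'}]}
```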
7648ac7ae01ee6cde8871128e162e8a4d5322b87
s3upload.py
s3upload.py
import sys import boto3 s3 = boto3.resource('s3') object = s3.Bucket('ictrp-data').upload_file(sys.argv[1], sys.argv[1]) object.Acl().put(ACL='public-read')
import sys import boto3 s3 = boto3.resource('s3') with open(sys.argv[1], 'rb') as f: object = s3.Bucket('ictrp-data').put_object(Key=sys.argv[1], Body=f) object.Acl().put(ACL='public-read')
Fix failing attempt to set ACL
Fix failing attempt to set ACL
Python
mit
gertvv/ictrp-retrieval
import sys import boto3 s3 = boto3.resource('s3') - object = s3.Bucket('ictrp-data').upload_file(sys.argv[1], sys.argv[1]) + with open(sys.argv[1], 'rb') as f: + object = s3.Bucket('ictrp-data').put_object(Key=sys.argv[1], Body=f) - object.Acl().put(ACL='public-read') + object.Acl().put(ACL='public-read')
Fix failing attempt to set ACL
## Code Before: import sys import boto3 s3 = boto3.resource('s3') object = s3.Bucket('ictrp-data').upload_file(sys.argv[1], sys.argv[1]) object.Acl().put(ACL='public-read') ## Instruction: Fix failing attempt to set ACL ## Code After: import sys import boto3 s3 = boto3.resource('s3') with open(sys.argv[1], 'rb') as f: object = s3.Bucket('ictrp-data').put_object(Key=sys.argv[1], Body=f) object.Acl().put(ACL='public-read')
import sys import boto3 s3 = boto3.resource('s3') - object = s3.Bucket('ictrp-data').upload_file(sys.argv[1], sys.argv[1]) + with open(sys.argv[1], 'rb') as f: + object = s3.Bucket('ictrp-data').put_object(Key=sys.argv[1], Body=f) - object.Acl().put(ACL='public-read') + object.Acl().put(ACL='public-read') ? ++++
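One variant worth knowing: `put_object` also accepts a canned ACL, so the upload and the permission change can happen in a single request instead of the separate `Acl().put` call above. The bucket name is the record's; treat the rest as a sketch rather than the repo's chosen fix:

```python
import sys
import boto3

s3 = boto3.resource('s3')
with open(sys.argv[1], 'rb') as f:
    # One request: upload the body and set the canned ACL together.
    s3.Bucket('ictrp-data').put_object(
        Key=sys.argv[1], Body=f, ACL='public-read')
```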
e5bda294e291a2d96b4f703a89128de9ee53a495
src/geelweb/django/editos/models.py
src/geelweb/django/editos/models.py
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
Add date_created and date_updated to editos.Edito model
Add date_created and date_updated to editos.Edito model
Python
mit
geelweb/django-editos
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) + date_created = models.DateTimeField(auto_now_add=True) + date_updated = models.DateTimeField(auto_now=True) + def __unicode__(self): return self.title
Add date_created and date_updated to editos.Edito model
## Code Before: from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) def __unicode__(self): return self.title ## Instruction: Add date_created and date_updated to editos.Edito model ## Code After: from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) date_created = models.DateTimeField(auto_now_add=True) date_updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title
from django.db import models from geelweb.django.editos import settings class Edito(models.Model): title = models.CharField(max_length=100) link = models.URLField() button_label = models.CharField(max_length=20, default="Go !", Required=False) image = models.FileField(upload_to="editos") text_content = models.CharField(max_length=400) display_from = models.DateField() display_until = models.DateField() active = models.BooleanField(default=True) text_theme = models.CharField(max_length=10, choices=settings.EDITOS_THEMES, default=settings.EDITOS_DEFAULT_THEME) + date_created = models.DateTimeField(auto_now_add=True) + date_updated = models.DateTimeField(auto_now=True) + def __unicode__(self): return self.title
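For readers new to these two options: `auto_now_add` stamps the field once, when the row is first inserted, while `auto_now` refreshes it on every `save()`. A tiny illustration with a hypothetical model (it needs a configured Django app to actually run, hence the commented usage):

```python
from django.db import models

class Note(models.Model):
    date_created = models.DateTimeField(auto_now_add=True)  # set on INSERT only
    date_updated = models.DateTimeField(auto_now=True)      # bumped on every save()

# note = Note.objects.create()   # both timestamps set to now
# note.save()                    # only date_updated moves forward
```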
2d1488669721a46350b5c0f06a049f5d4816f931
sauna/plugins/ext/disk.py
sauna/plugins/ext/disk.py
from sauna.plugins import PluginRegister from sauna.plugins.base import PsutilPlugin my_plugin = PluginRegister('Disk') @my_plugin.plugin() class Disk(PsutilPlugin): @my_plugin.check() def used_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): part_usage = self.psutil.disk_usage(part.mountpoint).percent status = self._value_to_status_less(part_usage, check_config) if status > 0: return ( status, 'Partition {} is full at {}%'.format(part.mountpoint, part_usage) ) return 0, 'Disk usage correct' @staticmethod def config_sample(): return ''' # Usage of disks Disk: checks: - type: used_percent warn: 80% crit: 90% '''
import os from sauna.plugins import PluginRegister from sauna.plugins.base import PsutilPlugin my_plugin = PluginRegister('Disk') @my_plugin.plugin() class Disk(PsutilPlugin): @my_plugin.check() def used_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): part_usage = self.psutil.disk_usage(part.mountpoint).percent status = self._value_to_status_less(part_usage, check_config) if status > 0: return ( status, 'Partition {} is full at {}%'.format(part.mountpoint, part_usage) ) return 0, 'Disk usage correct' @my_plugin.check() def used_inodes_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): s = os.statvfs(part.mountpoint) try: inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files) except ZeroDivisionError: continue status = self._value_to_status_less( inodes_usage, check_config, self._strip_percent_sign ) if status != self.STATUS_OK: return ( status, 'Partition {} uses {}% of inodes'.format(part.mountpoint, inodes_usage) ) return self.STATUS_OK, 'Inodes usage correct' @staticmethod def config_sample(): return ''' # Usage of disks Disk: checks: - type: used_percent warn: 80% crit: 90% - type: used_inodes_percent warn: 80% crit: 90% '''
Create Disk check to monitor inodes
Create Disk check to monitor inodes
Python
bsd-2-clause
bewiwi/sauna,NicolasLM/sauna
+ import os + from sauna.plugins import PluginRegister from sauna.plugins.base import PsutilPlugin my_plugin = PluginRegister('Disk') @my_plugin.plugin() class Disk(PsutilPlugin): @my_plugin.check() def used_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): part_usage = self.psutil.disk_usage(part.mountpoint).percent status = self._value_to_status_less(part_usage, check_config) if status > 0: return ( status, 'Partition {} is full at {}%'.format(part.mountpoint, part_usage) ) return 0, 'Disk usage correct' + @my_plugin.check() + def used_inodes_percent(self, check_config): + check_config = self._strip_percent_sign_from_check_config(check_config) + for part in self.psutil.disk_partitions(all=False): + s = os.statvfs(part.mountpoint) + try: + inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files) + except ZeroDivisionError: + continue + status = self._value_to_status_less( + inodes_usage, check_config, self._strip_percent_sign + ) + if status != self.STATUS_OK: + return ( + status, + 'Partition {} uses {}% of inodes'.format(part.mountpoint, + inodes_usage) + ) + return self.STATUS_OK, 'Inodes usage correct' + @staticmethod def config_sample(): return ''' # Usage of disks Disk: checks: - type: used_percent warn: 80% crit: 90% + - type: used_inodes_percent + warn: 80% + crit: 90% '''
Create Disk check to monitor inodes
## Code Before: from sauna.plugins import PluginRegister from sauna.plugins.base import PsutilPlugin my_plugin = PluginRegister('Disk') @my_plugin.plugin() class Disk(PsutilPlugin): @my_plugin.check() def used_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): part_usage = self.psutil.disk_usage(part.mountpoint).percent status = self._value_to_status_less(part_usage, check_config) if status > 0: return ( status, 'Partition {} is full at {}%'.format(part.mountpoint, part_usage) ) return 0, 'Disk usage correct' @staticmethod def config_sample(): return ''' # Usage of disks Disk: checks: - type: used_percent warn: 80% crit: 90% ''' ## Instruction: Create Disk check to monitor inodes ## Code After: import os from sauna.plugins import PluginRegister from sauna.plugins.base import PsutilPlugin my_plugin = PluginRegister('Disk') @my_plugin.plugin() class Disk(PsutilPlugin): @my_plugin.check() def used_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): part_usage = self.psutil.disk_usage(part.mountpoint).percent status = self._value_to_status_less(part_usage, check_config) if status > 0: return ( status, 'Partition {} is full at {}%'.format(part.mountpoint, part_usage) ) return 0, 'Disk usage correct' @my_plugin.check() def used_inodes_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): s = os.statvfs(part.mountpoint) try: inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files) except ZeroDivisionError: continue status = self._value_to_status_less( inodes_usage, check_config, self._strip_percent_sign ) if status != self.STATUS_OK: return ( status, 'Partition {} uses {}% of inodes'.format(part.mountpoint, inodes_usage) ) return self.STATUS_OK, 'Inodes usage correct' @staticmethod def config_sample(): return ''' # Usage of disks Disk: checks: - type: used_percent warn: 80% crit: 90% - type: used_inodes_percent warn: 80% crit: 90% '''
+ import os + from sauna.plugins import PluginRegister from sauna.plugins.base import PsutilPlugin my_plugin = PluginRegister('Disk') @my_plugin.plugin() class Disk(PsutilPlugin): @my_plugin.check() def used_percent(self, check_config): check_config = self._strip_percent_sign_from_check_config(check_config) for part in self.psutil.disk_partitions(all=False): part_usage = self.psutil.disk_usage(part.mountpoint).percent status = self._value_to_status_less(part_usage, check_config) if status > 0: return ( status, 'Partition {} is full at {}%'.format(part.mountpoint, part_usage) ) return 0, 'Disk usage correct' + @my_plugin.check() + def used_inodes_percent(self, check_config): + check_config = self._strip_percent_sign_from_check_config(check_config) + for part in self.psutil.disk_partitions(all=False): + s = os.statvfs(part.mountpoint) + try: + inodes_usage = int((s.f_files - s.f_favail) * 100 / s.f_files) + except ZeroDivisionError: + continue + status = self._value_to_status_less( + inodes_usage, check_config, self._strip_percent_sign + ) + if status != self.STATUS_OK: + return ( + status, + 'Partition {} uses {}% of inodes'.format(part.mountpoint, + inodes_usage) + ) + return self.STATUS_OK, 'Inodes usage correct' + @staticmethod def config_sample(): return ''' # Usage of disks Disk: checks: - type: used_percent warn: 80% crit: 90% + - type: used_inodes_percent + warn: 80% + crit: 90% '''
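The inode arithmetic in isolation: `os.statvfs` reports the total inode count as `f_files` and the count available to unprivileged processes as `f_favail`, and the `ZeroDivisionError` guard covers pseudo-filesystems that advertise zero inodes. A runnable POSIX-only sketch:

```python
import os

def inode_usage_percent(mountpoint):
    s = os.statvfs(mountpoint)
    try:
        # Used inodes as a share of the total, matching the plugin's formula.
        return int((s.f_files - s.f_favail) * 100 / s.f_files)
    except ZeroDivisionError:
        # e.g. some virtual filesystems report f_files == 0
        return None

print(inode_usage_percent('/'))
```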
6949339cda8c60b74341f854d9a00aa8abbfe4d5
test/level_sets_measure_test.py
test/level_sets_measure_test.py
__author__ = 'intsco' import cPickle from engine.pyIMS.image_measures.level_sets_measure import measure_of_chaos_dict from unittest import TestCase import unittest from os.path import join, realpath, dirname class MeasureOfChaosDictTest(TestCase): def setUp(self): self.rows, self.cols = 65, 65 self.input_fn = join(dirname(realpath(__file__)), 'data/measure_of_chaos_dict_test_input.pkl') with open(self.input_fn) as f: self.input_data = cPickle.load(f) def testMOCBoundaries(self): for img_d in self.input_data: if len(img_d) > 0: assert 0 <= measure_of_chaos_dict(img_d, self.rows, self.cols) <= 1 def testEmptyInput(self): # print measure_of_chaos_dict({}, self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, {}, self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, None, self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, (), self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, [], self.cols, self.cols) def testMaxInputDictKeyVal(self): max_key_val = self.rows * self.cols - 1 self.assertRaises(Exception, measure_of_chaos_dict, {max_key_val + 10: 1}, self.rows, self.cols) if __name__ == '__main__': unittest.main()
import unittest import numpy as np from ..image_measures.level_sets_measure import measure_of_chaos, _nan_to_zero class MeasureOfChaosTest(unittest.TestCase): def test__nan_to_zero_with_ge_zero(self): ids = ( np.zeros(1), np.ones(range(1, 10)), np.arange(1024 * 1024) ) for id_ in ids: before = id_.copy() _nan_to_zero(id_) np.testing.assert_array_equal(before, id_) def test__nan_to_zero_with_negatives(self): negs = ( np.array([-1]), -np.arange(1, 1024 * 1024 + 1).reshape((1024, 1024)), np.linspace(0, -20, 201) ) for neg in negs: sh = neg.shape _nan_to_zero(neg) np.testing.assert_array_equal(neg, np.zeros(sh)) if __name__ == '__main__': unittest.main()
Implement first tests for _nan_to_zero
Implement first tests for _nan_to_zero - Remove outdated dict test class - Write some test methods
Python
apache-2.0
andy-d-palmer/pyIMS,alexandrovteam/pyImagingMSpec
- __author__ = 'intsco' + import unittest - import cPickle + import numpy as np + - from engine.pyIMS.image_measures.level_sets_measure import measure_of_chaos_dict + from ..image_measures.level_sets_measure import measure_of_chaos, _nan_to_zero - from unittest import TestCase - import unittest - from os.path import join, realpath, dirname - class MeasureOfChaosDictTest(TestCase): + class MeasureOfChaosTest(unittest.TestCase): + def test__nan_to_zero_with_ge_zero(self): + ids = ( + np.zeros(1), + np.ones(range(1, 10)), + np.arange(1024 * 1024) + ) + for id_ in ids: + before = id_.copy() + _nan_to_zero(id_) + np.testing.assert_array_equal(before, id_) + def test__nan_to_zero_with_negatives(self): + negs = ( + np.array([-1]), + -np.arange(1, 1024 * 1024 + 1).reshape((1024, 1024)), + np.linspace(0, -20, 201) + ) + for neg in negs: + sh = neg.shape + _nan_to_zero(neg) + np.testing.assert_array_equal(neg, np.zeros(sh)) - def setUp(self): - self.rows, self.cols = 65, 65 - self.input_fn = join(dirname(realpath(__file__)), 'data/measure_of_chaos_dict_test_input.pkl') - with open(self.input_fn) as f: - self.input_data = cPickle.load(f) - - def testMOCBoundaries(self): - for img_d in self.input_data: - if len(img_d) > 0: - assert 0 <= measure_of_chaos_dict(img_d, self.rows, self.cols) <= 1 - - def testEmptyInput(self): - # print measure_of_chaos_dict({}, self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, {}, self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, None, self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, (), self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, [], self.cols, self.cols) - - def testMaxInputDictKeyVal(self): - max_key_val = self.rows * self.cols - 1 - self.assertRaises(Exception, measure_of_chaos_dict, {max_key_val + 10: 1}, self.rows, self.cols) - if __name__ == '__main__': unittest.main()
Implement first tests for _nan_to_zero
## Code Before: __author__ = 'intsco' import cPickle from engine.pyIMS.image_measures.level_sets_measure import measure_of_chaos_dict from unittest import TestCase import unittest from os.path import join, realpath, dirname class MeasureOfChaosDictTest(TestCase): def setUp(self): self.rows, self.cols = 65, 65 self.input_fn = join(dirname(realpath(__file__)), 'data/measure_of_chaos_dict_test_input.pkl') with open(self.input_fn) as f: self.input_data = cPickle.load(f) def testMOCBoundaries(self): for img_d in self.input_data: if len(img_d) > 0: assert 0 <= measure_of_chaos_dict(img_d, self.rows, self.cols) <= 1 def testEmptyInput(self): # print measure_of_chaos_dict({}, self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, {}, self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, None, self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, (), self.cols, self.cols) self.assertRaises(Exception, measure_of_chaos_dict, [], self.cols, self.cols) def testMaxInputDictKeyVal(self): max_key_val = self.rows * self.cols - 1 self.assertRaises(Exception, measure_of_chaos_dict, {max_key_val + 10: 1}, self.rows, self.cols) if __name__ == '__main__': unittest.main() ## Instruction: Implement first tests for _nan_to_zero ## Code After: import unittest import numpy as np from ..image_measures.level_sets_measure import measure_of_chaos, _nan_to_zero class MeasureOfChaosTest(unittest.TestCase): def test__nan_to_zero_with_ge_zero(self): ids = ( np.zeros(1), np.ones(range(1, 10)), np.arange(1024 * 1024) ) for id_ in ids: before = id_.copy() _nan_to_zero(id_) np.testing.assert_array_equal(before, id_) def test__nan_to_zero_with_negatives(self): negs = ( np.array([-1]), -np.arange(1, 1024 * 1024 + 1).reshape((1024, 1024)), np.linspace(0, -20, 201) ) for neg in negs: sh = neg.shape _nan_to_zero(neg) np.testing.assert_array_equal(neg, np.zeros(sh)) if __name__ == '__main__': unittest.main()
- __author__ = 'intsco' + import unittest - import cPickle + import numpy as np + - from engine.pyIMS.image_measures.level_sets_measure import measure_of_chaos_dict ? ------ ----- ^^^ + from ..image_measures.level_sets_measure import measure_of_chaos, _nan_to_zero ? ++ ^^^^ ++++++ - from unittest import TestCase - import unittest - from os.path import join, realpath, dirname - class MeasureOfChaosDictTest(TestCase): ? ---- + class MeasureOfChaosTest(unittest.TestCase): ? +++++++++ + def test__nan_to_zero_with_ge_zero(self): + ids = ( + np.zeros(1), + np.ones(range(1, 10)), + np.arange(1024 * 1024) + ) + for id_ in ids: + before = id_.copy() + _nan_to_zero(id_) + np.testing.assert_array_equal(before, id_) + def test__nan_to_zero_with_negatives(self): + negs = ( + np.array([-1]), + -np.arange(1, 1024 * 1024 + 1).reshape((1024, 1024)), + np.linspace(0, -20, 201) + ) + for neg in negs: + sh = neg.shape + _nan_to_zero(neg) + np.testing.assert_array_equal(neg, np.zeros(sh)) - def setUp(self): - self.rows, self.cols = 65, 65 - self.input_fn = join(dirname(realpath(__file__)), 'data/measure_of_chaos_dict_test_input.pkl') - with open(self.input_fn) as f: - self.input_data = cPickle.load(f) - - def testMOCBoundaries(self): - for img_d in self.input_data: - if len(img_d) > 0: - assert 0 <= measure_of_chaos_dict(img_d, self.rows, self.cols) <= 1 - - def testEmptyInput(self): - # print measure_of_chaos_dict({}, self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, {}, self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, None, self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, (), self.cols, self.cols) - self.assertRaises(Exception, measure_of_chaos_dict, [], self.cols, self.cols) - - def testMaxInputDictKeyVal(self): - max_key_val = self.rows * self.cols - 1 - self.assertRaises(Exception, measure_of_chaos_dict, {max_key_val + 10: 1}, self.rows, self.cols) - if __name__ == '__main__': unittest.main()
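The `ndiff` and `diff` fields in these records follow the line-marker convention of Python's standard `difflib` library: `- ` for removed lines, `+ ` for added lines, and `? ` for intra-line change hints. A minimal, runnable sketch of producing that format; the two input line lists are made up for illustration and are not taken from any record:

import difflib

old = ["import cPickle\n", "from unittest import TestCase\n"]
new = ["import unittest\n", "import numpy as np\n"]

# ndiff yields every line of both inputs, prefixed with '  ', '- ', '+ ', or '? '
for line in difflib.ndiff(old, new):
    print(line, end="")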
ce380319562eb94e252c74de7b6b1ac18a357466
chainer/training/extensions/value_observation.py
chainer/training/extensions/value_observation.py
import time from chainer.training import extension def observe_value(key, target_func): """Returns a trainer extension to continuously record a value. Args: key (str): Key of observation to record. target_func (function): Function that returns the value to record. It must take one argument: trainer object. Returns: The extension function. """ @extension.make_extension( trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[key] = target_func(trainer) return _observe_value def observe_time(key='time'): """Returns a trainer extension to record the elapsed time. Args: key (str): Key of observation to record. Returns: The extension function. """ start_time = time.time() return observe_value(key, lambda _: time.time() - start_time) def observe_lr(optimizer, key='lr'): """Returns a trainer extension to record the learning rate. Args: optimizer: Optimizer object whose learning rate is recorded. key (str): Key of observation to record. Returns: The extension function. """ return observe_value(key, lambda _: optimizer.lr)
import time from chainer.training import extension def observe_value(key, target_func): """Returns a trainer extension to continuously record a value. Args: key (str): Key of observation to record. target_func (function): Function that returns the value to record. It must take one argument: :class:~chainer.training.Trainer object. Returns: The extension function. """ @extension.make_extension( trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[key] = target_func(trainer) return _observe_value def observe_time(key='time'): """Returns a trainer extension to record the elapsed time. Args: key (str): Key of observation to record. Returns: The extension function. """ start_time = time.time() return observe_value(key, lambda _: time.time() - start_time) def observe_lr(optimizer, key='lr'): """Returns a trainer extension to record the learning rate. Args: optimizer (~chainer.Optimizer): Optimizer object whose learning rate is recorded. key (str): Key of observation to record. Returns: The extension function. """ return observe_value(key, lambda _: optimizer.lr)
Add links for the document
Add links for the document
Python
mit
ktnyt/chainer,hvy/chainer,aonotas/chainer,niboshi/chainer,okuta/chainer,chainer/chainer,anaruse/chainer,niboshi/chainer,ronekko/chainer,okuta/chainer,jnishi/chainer,wkentaro/chainer,cupy/cupy,chainer/chainer,okuta/chainer,wkentaro/chainer,jnishi/chainer,delta2323/chainer,rezoo/chainer,hvy/chainer,hvy/chainer,jnishi/chainer,pfnet/chainer,niboshi/chainer,keisuke-umezawa/chainer,tkerola/chainer,jnishi/chainer,chainer/chainer,cupy/cupy,keisuke-umezawa/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,ktnyt/chainer,okuta/chainer,ysekky/chainer,wkentaro/chainer,cupy/cupy,wkentaro/chainer,chainer/chainer,niboshi/chainer,kiyukuta/chainer,ktnyt/chainer,cupy/cupy,ktnyt/chainer,kashif/chainer,hvy/chainer
import time from chainer.training import extension def observe_value(key, target_func): """Returns a trainer extension to continuously record a value. Args: key (str): Key of observation to record. target_func (function): Function that returns the value to record. - It must take one argument: trainer object. + It must take one argument: :class:~chainer.training.Trainer object. Returns: The extension function. """ @extension.make_extension( trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[key] = target_func(trainer) return _observe_value def observe_time(key='time'): """Returns a trainer extension to record the elapsed time. Args: key (str): Key of observation to record. Returns: The extension function. """ start_time = time.time() return observe_value(key, lambda _: time.time() - start_time) def observe_lr(optimizer, key='lr'): """Returns a trainer extension to record the learning rate. Args: - optimizer: Optimizer object whose learning rate is recorded. + optimizer (~chainer.Optimizer): Optimizer object whose learning rate is recorded. key (str): Key of observation to record. Returns: The extension function. """ return observe_value(key, lambda _: optimizer.lr)
Add links for the document
## Code Before: import time from chainer.training import extension def observe_value(key, target_func): """Returns a trainer extension to continuously record a value. Args: key (str): Key of observation to record. target_func (function): Function that returns the value to record. It must take one argument: trainer object. Returns: The extension function. """ @extension.make_extension( trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[key] = target_func(trainer) return _observe_value def observe_time(key='time'): """Returns a trainer extension to record the elapsed time. Args: key (str): Key of observation to record. Returns: The extension function. """ start_time = time.time() return observe_value(key, lambda _: time.time() - start_time) def observe_lr(optimizer, key='lr'): """Returns a trainer extension to record the learning rate. Args: optimizer: Optimizer object whose learning rate is recorded. key (str): Key of observation to record. Returns: The extension function. """ return observe_value(key, lambda _: optimizer.lr) ## Instruction: Add links for the document ## Code After: import time from chainer.training import extension def observe_value(key, target_func): """Returns a trainer extension to continuously record a value. Args: key (str): Key of observation to record. target_func (function): Function that returns the value to record. It must take one argument: :class:~chainer.training.Trainer object. Returns: The extension function. """ @extension.make_extension( trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[key] = target_func(trainer) return _observe_value def observe_time(key='time'): """Returns a trainer extension to record the elapsed time. Args: key (str): Key of observation to record. Returns: The extension function. """ start_time = time.time() return observe_value(key, lambda _: time.time() - start_time) def observe_lr(optimizer, key='lr'): """Returns a trainer extension to record the learning rate. Args: optimizer (~chainer.Optimizer): Optimizer object whose learning rate is recorded. key (str): Key of observation to record. Returns: The extension function. """ return observe_value(key, lambda _: optimizer.lr)
import time from chainer.training import extension def observe_value(key, target_func): """Returns a trainer extension to continuously record a value. Args: key (str): Key of observation to record. target_func (function): Function that returns the value to record. - It must take one argument: trainer object. + It must take one argument: :class:~chainer.training.Trainer object. ? ++++++++++++++++ +++++++++ Returns: The extension function. """ @extension.make_extension( trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER) def _observe_value(trainer): trainer.observation[key] = target_func(trainer) return _observe_value def observe_time(key='time'): """Returns a trainer extension to record the elapsed time. Args: key (str): Key of observation to record. Returns: The extension function. """ start_time = time.time() return observe_value(key, lambda _: time.time() - start_time) def observe_lr(optimizer, key='lr'): """Returns a trainer extension to record the learning rate. Args: - optimizer: Optimizer object whose learning rate is recorded. + optimizer (~chainer.Optimizer): Optimizer object whose learning rate is recorded. ? +++++++++++++++++++++ key (str): Key of observation to record. Returns: The extension function. """ return observe_value(key, lambda _: optimizer.lr)
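A self-contained check of the `observe_value` contract from the record above, using a stand-in object in place of a real `chainer.training.Trainer`; only the `observation` mapping that the extension writes to is assumed, and the `@extension.make_extension` decorator is dropped from the re-statement:

import time
import types

def observe_value(key, target_func):
    # Re-stated from the record, minus the trigger/priority decorator.
    def _observe_value(trainer):
        trainer.observation[key] = target_func(trainer)
    return _observe_value

trainer = types.SimpleNamespace(observation={})
start = time.time()
observe_value('time', lambda _: time.time() - start)(trainer)
print(trainer.observation)  # e.g. {'time': 2.1e-06}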
a91a04af6b95fa600a0b3ce74b5fffc07ecf590e
polymorphic/__init__.py
polymorphic/__init__.py
# See PEP 440 (https://www.python.org/dev/peps/pep-0440/) __version__ = "1.3"
import pkg_resources __version__ = pkg_resources.require("django-polymorphic")[0].version
Set polymorphic.__version__ from setuptools metadata
Set polymorphic.__version__ from setuptools metadata
Python
bsd-3-clause
skirsdeda/django_polymorphic,skirsdeda/django_polymorphic,skirsdeda/django_polymorphic,chrisglass/django_polymorphic,chrisglass/django_polymorphic
+ import pkg_resources - # See PEP 440 (https://www.python.org/dev/peps/pep-0440/) - __version__ = "1.3" + + __version__ = pkg_resources.require("django-polymorphic")[0].version +
Set polymorphic.__version__ from setuptools metadata
## Code Before: # See PEP 440 (https://www.python.org/dev/peps/pep-0440/) __version__ = "1.3" ## Instruction: Set polymorphic.__version__ from setuptools metadata ## Code After: import pkg_resources __version__ = pkg_resources.require("django-polymorphic")[0].version
- # See PEP 440 (https://www.python.org/dev/peps/pep-0440/) - __version__ = "1.3" + import pkg_resources + + + __version__ = pkg_resources.require("django-polymorphic")[0].version
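The pattern adopted in this record reads the version string from the installed distribution's metadata instead of hard-coding it. A runnable sketch of the same call, where `setuptools` stands in for `django-polymorphic` only because it is almost always installed:

import pkg_resources

# require() resolves the named distribution (and its dependencies) and returns
# a list of Distribution objects; index 0 is the distribution that was asked for.
version = pkg_resources.require("setuptools")[0].version
print(version)

On Python 3.8+ the same lookup is available from the standard library as importlib.metadata.version("django-polymorphic"), without depending on setuptools.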
5dd37364a9616d42291b8841552bdd24aebd908a
selective_search.py
selective_search.py
import numpy import segment def calc_adjacency_matrix(label_img, n_region): adjacency = numpy.diag([1] * n_region) h, w = label_img.shape[0], label_img.shape[1] for y in range(h): for x in range(w): here = label_img[y, x] if y < h - 1: b = label_img[y + 1, x] adjacency[here, b] = adjacency[b, here] = 1 if x < w - 1: r = label_img[y, x + 1] adjacency[here, r] = adjacency[r, here] = 1 return adjacency
import numpy import segment def calc_adjacency_matrix(label_img, n_region): A = numpy.diag([1] * n_region) h, w = label_img.shape[0], label_img.shape[1] for y in range(h): for x in range(w): here = label_img[y, x] if y < h - 1: b = label_img[y + 1, x] A[here, b] = A[b, here] = 1 if x < w - 1: r = label_img[y, x + 1] A[here, r] = A[r, here] = 1 return A
Change name of local variable
Change name of local variable
Python
mit
belltailjp/selective_search_py,belltailjp/selective_search_py,gamer13/selective_search_py,BradNeuberg/selective_search_py,gamer13/selective_search_py,BradNeuberg/selective_search_py
import numpy import segment def calc_adjacency_matrix(label_img, n_region): - adjacency = numpy.diag([1] * n_region) + A = numpy.diag([1] * n_region) h, w = label_img.shape[0], label_img.shape[1] for y in range(h): for x in range(w): here = label_img[y, x] if y < h - 1: b = label_img[y + 1, x] - adjacency[here, b] = adjacency[b, here] = 1 + A[here, b] = A[b, here] = 1 if x < w - 1: r = label_img[y, x + 1] - adjacency[here, r] = adjacency[r, here] = 1 + A[here, r] = A[r, here] = 1 - return adjacency + return A
Change name of local variable
## Code Before: import numpy import segment def calc_adjacency_matrix(label_img, n_region): adjacency = numpy.diag([1] * n_region) h, w = label_img.shape[0], label_img.shape[1] for y in range(h): for x in range(w): here = label_img[y, x] if y < h - 1: b = label_img[y + 1, x] adjacency[here, b] = adjacency[b, here] = 1 if x < w - 1: r = label_img[y, x + 1] adjacency[here, r] = adjacency[r, here] = 1 return adjacency ## Instruction: Change name of local variable ## Code After: import numpy import segment def calc_adjacency_matrix(label_img, n_region): A = numpy.diag([1] * n_region) h, w = label_img.shape[0], label_img.shape[1] for y in range(h): for x in range(w): here = label_img[y, x] if y < h - 1: b = label_img[y + 1, x] A[here, b] = A[b, here] = 1 if x < w - 1: r = label_img[y, x + 1] A[here, r] = A[r, here] = 1 return A
import numpy import segment def calc_adjacency_matrix(label_img, n_region): - adjacency = numpy.diag([1] * n_region) ? ^^^^^^^^^ + A = numpy.diag([1] * n_region) ? ^ h, w = label_img.shape[0], label_img.shape[1] for y in range(h): for x in range(w): here = label_img[y, x] if y < h - 1: b = label_img[y + 1, x] - adjacency[here, b] = adjacency[b, here] = 1 ? ^^^^^^^^^ ^^^^^^^^^ + A[here, b] = A[b, here] = 1 ? ^ ^ if x < w - 1: r = label_img[y, x + 1] - adjacency[here, r] = adjacency[r, here] = 1 ? ^^^^^^^^^ ^^^^^^^^^ + A[here, r] = A[r, here] = 1 ? ^ ^ - return adjacency + return A
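An equivalent, vectorized sketch of `calc_adjacency_matrix` from this record, written against plain NumPy and not taken from the source repository: two regions are marked adjacent whenever vertically or horizontally neighbouring pixels carry different labels.

import numpy as np

def calc_adjacency_matrix_vec(label_img, n_region):
    A = np.eye(n_region, dtype=int)  # every region is adjacent to itself
    pairs = (
        (label_img[:-1, :].ravel(), label_img[1:, :].ravel()),  # vertical neighbours
        (label_img[:, :-1].ravel(), label_img[:, 1:].ravel()),  # horizontal neighbours
    )
    for a, b in pairs:
        A[a, b] = 1  # fancy indexing sets one entry per neighbouring pixel pair
        A[b, a] = 1
    return A

labels = np.array([[0, 0, 1],
                   [2, 2, 1]])
print(calc_adjacency_matrix_vec(labels, 3))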
f4c56937caacb4709847d67752f4ff3cba4568f6
tests/test_it.py
tests/test_it.py
import os import shutil import deck2pdf from pytest import raises from . import ( current_dir, test_dir, skip_in_ci, ) class TestForMain(object): def setUp(self): shutil.rmtree(os.path.join(current_dir, '.deck2pdf'), ignore_errors=True) def test_help(self): raises(SystemExit, deck2pdf.main, []) raises(SystemExit, deck2pdf.main, ['-h']) @skip_in_ci def test_files(self): test_slide_path = os.path.join(test_dir, 'testslide/_build/slides/index.html') deck2pdf.main([test_slide_path, ]) assert os.path.exists(os.path.join(current_dir, '.deck2pdf'))
import os import shutil import deck2pdf from pytest import raises from . import ( current_dir, test_dir, ) class TestForMain(object): def setUp(self): shutil.rmtree(os.path.join(current_dir, '.deck2pdf'), ignore_errors=True) def test_help(self): raises(SystemExit, deck2pdf.main, []) raises(SystemExit, deck2pdf.main, ['-h']) def test_files(self): test_slide_path = os.path.join(test_dir, 'testslide/_build/slides/index.html') deck2pdf.main([test_slide_path, '-c', 'stub']) assert os.path.exists(os.path.join(current_dir, '.deck2pdf'))
Remove decorator 'skip_in_ci' from test_files
Remove decorator 'skip_in_ci' from test_files
Because a stub of the capture engine is implemented, the 'Output slides pdf' test can run in CircleCI
Python
mit
attakei/deck2pdf-python,attakei/deck2pdf-python,attakei/slide2pdf,attakei/deck2pdf,attakei/slide2pdf,attakei/deck2pdf
import os import shutil import deck2pdf from pytest import raises from . import ( current_dir, test_dir, - skip_in_ci, ) class TestForMain(object): def setUp(self): shutil.rmtree(os.path.join(current_dir, '.deck2pdf'), ignore_errors=True) def test_help(self): raises(SystemExit, deck2pdf.main, []) raises(SystemExit, deck2pdf.main, ['-h']) - @skip_in_ci def test_files(self): test_slide_path = os.path.join(test_dir, 'testslide/_build/slides/index.html') - deck2pdf.main([test_slide_path, ]) + deck2pdf.main([test_slide_path, '-c', 'stub']) assert os.path.exists(os.path.join(current_dir, '.deck2pdf'))
Remove decorator 'skip_in_ci' from test_files
## Code Before: import os import shutil import deck2pdf from pytest import raises from . import ( current_dir, test_dir, skip_in_ci, ) class TestForMain(object): def setUp(self): shutil.rmtree(os.path.join(current_dir, '.deck2pdf'), ignore_errors=True) def test_help(self): raises(SystemExit, deck2pdf.main, []) raises(SystemExit, deck2pdf.main, ['-h']) @skip_in_ci def test_files(self): test_slide_path = os.path.join(test_dir, 'testslide/_build/slides/index.html') deck2pdf.main([test_slide_path, ]) assert os.path.exists(os.path.join(current_dir, '.deck2pdf')) ## Instruction: Remove decorator 'skip_in_ci' from test_files ## Code After: import os import shutil import deck2pdf from pytest import raises from . import ( current_dir, test_dir, ) class TestForMain(object): def setUp(self): shutil.rmtree(os.path.join(current_dir, '.deck2pdf'), ignore_errors=True) def test_help(self): raises(SystemExit, deck2pdf.main, []) raises(SystemExit, deck2pdf.main, ['-h']) def test_files(self): test_slide_path = os.path.join(test_dir, 'testslide/_build/slides/index.html') deck2pdf.main([test_slide_path, '-c', 'stub']) assert os.path.exists(os.path.join(current_dir, '.deck2pdf'))
import os import shutil import deck2pdf from pytest import raises from . import ( current_dir, test_dir, - skip_in_ci, ) class TestForMain(object): def setUp(self): shutil.rmtree(os.path.join(current_dir, '.deck2pdf'), ignore_errors=True) def test_help(self): raises(SystemExit, deck2pdf.main, []) raises(SystemExit, deck2pdf.main, ['-h']) - @skip_in_ci def test_files(self): test_slide_path = os.path.join(test_dir, 'testslide/_build/slides/index.html') - deck2pdf.main([test_slide_path, ]) + deck2pdf.main([test_slide_path, '-c', 'stub']) ? ++++++++++++ assert os.path.exists(os.path.join(current_dir, '.deck2pdf'))
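The tests in this record use pytest's call form, `raises(SystemExit, fn, *args)`; the equivalent and more common context-manager form looks like the sketch below, where `main` is a stand-in for `deck2pdf.main`:

import pytest

def main(argv):
    # Stand-in for deck2pdf.main, which exits via argparse on bad arguments.
    raise SystemExit(2)

def test_help_exits():
    with pytest.raises(SystemExit):
        main(['-h'])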
1b0fdfdc2ff49d6dfc7d239b5a9cda1ff334f20b
candidates/cache.py
candidates/cache.py
from django.core.cache import cache def post_cache_key(mapit_area_id): """Form the cache key used for post data""" return "post:{0}".format(mapit_area_id) def invalidate_posts(post_ids): for post_id in post_ids: post_key = post_cache_key(post_id) cache.delete(post_key) def get_post_cached(api, mapit_area_id): post_key = post_cache_key(mapit_area_id) result_from_cache = cache.get(post_key) if result_from_cache is not None: return result_from_cache mp_post = api.posts(mapit_area_id).get( embed='membership.person.membership.organization') cache.set(post_key, mp_post, None) return mp_post
from django.core.cache import cache def post_cache_key(mapit_area_id): """Form the cache key used for post data""" return "post:{0}".format(mapit_area_id) def person_cache_key(person_id): """Form the cache key used for person data""" return "person:{0}".format(person_id) def invalidate_posts(post_ids): """Delete cache entries for all of these PopIt posts""" cache.delete_many(post_cache_key(post_id) for post_id in post_ids) def invalidate_person(person_id): """Delete the cache entry for a particular person's PopIt data""" person_key = person_cache_key(person_id) cache.delete(person_key) def get_post_cached(api, mapit_area_id): post_key = post_cache_key(mapit_area_id) result_from_cache = cache.get(post_key) if result_from_cache is not None: return result_from_cache mp_post = api.posts(mapit_area_id).get( embed='membership.person.membership.organization') # Add posts data with an indefinite time-out (we should be # invalidating the cached on any change). cache.set(post_key, mp_post, None) return mp_post def get_person_cached(api, person_id): person_key = person_cache_key(person_id) result_from_cache = cache.get(person_key) if result_from_cache is not None: return result_from_cache person_data = api.persons(person_id).get( embed='membership.organization' ) # Add it the person data to the cache with a timeout of # a day. cache.set(person_key, person_data, 86400) return person_data
Add functions for caching person data from PopIt
Add functions for caching person data from PopIt
Python
agpl-3.0
mysociety/yournextrepresentative,openstate/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,openstate/yournextrepresentative
from django.core.cache import cache def post_cache_key(mapit_area_id): """Form the cache key used for post data""" return "post:{0}".format(mapit_area_id) + def person_cache_key(person_id): + """Form the cache key used for person data""" + return "person:{0}".format(person_id) + def invalidate_posts(post_ids): - for post_id in post_ids: - post_key = post_cache_key(post_id) + """Delete cache entries for all of these PopIt posts""" + cache.delete_many(post_cache_key(post_id) for post_id in post_ids) + + def invalidate_person(person_id): + """Delete the cache entry for a particular person's PopIt data""" + person_key = person_cache_key(person_id) - cache.delete(post_key) + cache.delete(person_key) def get_post_cached(api, mapit_area_id): post_key = post_cache_key(mapit_area_id) result_from_cache = cache.get(post_key) if result_from_cache is not None: return result_from_cache mp_post = api.posts(mapit_area_id).get( embed='membership.person.membership.organization') + # Add posts data with an indefinite time-out (we should be + # invalidating the cached on any change). cache.set(post_key, mp_post, None) return mp_post + def get_person_cached(api, person_id): + person_key = person_cache_key(person_id) + result_from_cache = cache.get(person_key) + if result_from_cache is not None: + return result_from_cache + person_data = api.persons(person_id).get( + embed='membership.organization' + ) + # Add it the person data to the cache with a timeout of + # a day. + cache.set(person_key, person_data, 86400) + return person_data +
Add functions for caching person data from PopIt
## Code Before: from django.core.cache import cache def post_cache_key(mapit_area_id): """Form the cache key used for post data""" return "post:{0}".format(mapit_area_id) def invalidate_posts(post_ids): for post_id in post_ids: post_key = post_cache_key(post_id) cache.delete(post_key) def get_post_cached(api, mapit_area_id): post_key = post_cache_key(mapit_area_id) result_from_cache = cache.get(post_key) if result_from_cache is not None: return result_from_cache mp_post = api.posts(mapit_area_id).get( embed='membership.person.membership.organization') cache.set(post_key, mp_post, None) return mp_post ## Instruction: Add functions for caching person data from PopIt ## Code After: from django.core.cache import cache def post_cache_key(mapit_area_id): """Form the cache key used for post data""" return "post:{0}".format(mapit_area_id) def person_cache_key(person_id): """Form the cache key used for person data""" return "person:{0}".format(person_id) def invalidate_posts(post_ids): """Delete cache entries for all of these PopIt posts""" cache.delete_many(post_cache_key(post_id) for post_id in post_ids) def invalidate_person(person_id): """Delete the cache entry for a particular person's PopIt data""" person_key = person_cache_key(person_id) cache.delete(person_key) def get_post_cached(api, mapit_area_id): post_key = post_cache_key(mapit_area_id) result_from_cache = cache.get(post_key) if result_from_cache is not None: return result_from_cache mp_post = api.posts(mapit_area_id).get( embed='membership.person.membership.organization') # Add posts data with an indefinite time-out (we should be # invalidating the cached on any change). cache.set(post_key, mp_post, None) return mp_post def get_person_cached(api, person_id): person_key = person_cache_key(person_id) result_from_cache = cache.get(person_key) if result_from_cache is not None: return result_from_cache person_data = api.persons(person_id).get( embed='membership.organization' ) # Add it the person data to the cache with a timeout of # a day. cache.set(person_key, person_data, 86400) return person_data
from django.core.cache import cache def post_cache_key(mapit_area_id): """Form the cache key used for post data""" return "post:{0}".format(mapit_area_id) + def person_cache_key(person_id): + """Form the cache key used for person data""" + return "person:{0}".format(person_id) + def invalidate_posts(post_ids): - for post_id in post_ids: - post_key = post_cache_key(post_id) + """Delete cache entries for all of these PopIt posts""" + cache.delete_many(post_cache_key(post_id) for post_id in post_ids) + + def invalidate_person(person_id): + """Delete the cache entry for a particular person's PopIt data""" + person_key = person_cache_key(person_id) - cache.delete(post_key) ? ---- ^^ + cache.delete(person_key) ? +++ ^ def get_post_cached(api, mapit_area_id): post_key = post_cache_key(mapit_area_id) result_from_cache = cache.get(post_key) if result_from_cache is not None: return result_from_cache mp_post = api.posts(mapit_area_id).get( embed='membership.person.membership.organization') + # Add posts data with an indefinite time-out (we should be + # invalidating the cached on any change). cache.set(post_key, mp_post, None) return mp_post + + def get_person_cached(api, person_id): + person_key = person_cache_key(person_id) + result_from_cache = cache.get(person_key) + if result_from_cache is not None: + return result_from_cache + person_data = api.persons(person_id).get( + embed='membership.organization' + ) + # Add it the person data to the cache with a timeout of + # a day. + cache.set(person_key, person_data, 86400) + return person_data
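A self-contained demonstration of the cache-aside pattern used in this record, configuring Django's in-memory backend by hand so the snippet runs outside the project; the literal dict stands in for the PopIt API call:

import django
from django.conf import settings

settings.configure(CACHES={'default': {
    'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}})
django.setup()

from django.core.cache import cache

key = 'post:42'
post = cache.get(key)
if post is None:                # cache miss: fetch, then store
    post = {'id': 42}           # stand-in for api.posts(42).get(...)
    cache.set(key, post, None)  # timeout=None caches indefinitely
cache.delete_many([key])        # bulk invalidation, as in invalidate_posts
print(cache.get(key))           # None again after invalidation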
58c97445c8d55d48e03498c758f7b7c6dee245aa
enabled/_50_admin_add_monitoring_panel.py
enabled/_50_admin_add_monitoring_panel.py
PANEL = 'monitoring' # The name of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'overcloud' # The name of the panel group the PANEL is associated with. #PANEL_GROUP = 'admin' # Python panel class of the PANEL to be added. ADD_PANEL = \ 'monitoring.panel.Monitoring' # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['monitoring']
PANEL = 'monitoring' # The name of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'overcloud' # The name of the panel group the PANEL is associated with. #PANEL_GROUP = 'admin' DEFAULT_PANEL = 'monitoring' # Python panel class of the PANEL to be added. ADD_PANEL = \ 'monitoring.panel.Monitoring' # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['monitoring'] # A list of angular modules to be added as dependencies to horizon app. #ADD_ANGULAR_MODULE = ['monitoringApp']
Set DEFAULT_PANEL to monitoring panel
Set DEFAULT_PANEL to monitoring panel
Python
apache-2.0
stackforge/monasca-ui,openstack/monasca-ui,openstack/monasca-ui,stackforge/monasca-ui,openstack/monasca-ui,openstack/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui
PANEL = 'monitoring' # The name of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'overcloud' # The name of the panel group the PANEL is associated with. #PANEL_GROUP = 'admin' + + DEFAULT_PANEL = 'monitoring' # Python panel class of the PANEL to be added. ADD_PANEL = \ 'monitoring.panel.Monitoring' # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['monitoring'] + # A list of angular modules to be added as dependencies to horizon app. + #ADD_ANGULAR_MODULE = ['monitoringApp'] +
Set DEFAULT_PANEL to monitoring panel
## Code Before: PANEL = 'monitoring' # The name of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'overcloud' # The name of the panel group the PANEL is associated with. #PANEL_GROUP = 'admin' # Python panel class of the PANEL to be added. ADD_PANEL = \ 'monitoring.panel.Monitoring' # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['monitoring'] ## Instruction: Set DEFAULT_PANEL to monitoring panel ## Code After: PANEL = 'monitoring' # The name of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'overcloud' # The name of the panel group the PANEL is associated with. #PANEL_GROUP = 'admin' DEFAULT_PANEL = 'monitoring' # Python panel class of the PANEL to be added. ADD_PANEL = \ 'monitoring.panel.Monitoring' # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['monitoring'] # A list of angular modules to be added as dependencies to horizon app. #ADD_ANGULAR_MODULE = ['monitoringApp']
PANEL = 'monitoring' # The name of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'overcloud' # The name of the panel group the PANEL is associated with. #PANEL_GROUP = 'admin' + + DEFAULT_PANEL = 'monitoring' # Python panel class of the PANEL to be added. ADD_PANEL = \ 'monitoring.panel.Monitoring' # A list of applications to be added to INSTALLED_APPS. ADD_INSTALLED_APPS = ['monitoring'] + # A list of angular modules to be added as dependencies to horizon app. + #ADD_ANGULAR_MODULE = ['monitoringApp'] +
3035521c5a8e04b8eeb6874d8769dd5859747d53
devpi_builder/cli.py
devpi_builder/cli.py
import argparse from devpi_builder import requirements, wheeler, devpi def main(args=None): parser = argparse.ArgumentParser(description='Create wheels for all given project versions and upload them to the given index.') parser.add_argument('requirements', help='requirements.txt style file specifying which project versions to package.') parser.add_argument('index', help='The index to upload the packaged software to.') parser.add_argument('user', help='The user to log in as.') parser.add_argument('password', help='Password of the user.') parser.add_argument('--blacklist', help='Packages matched by this requirements.txt style file will never be build.') args = parser.parse_args(args=args) with wheeler.Builder() as builder, devpi.Client(args.index, args.user, args.password) as devpi_client: for package, version in requirements.read(args.requirements): if devpi_client.package_version_exists(package, version): continue if args.blacklist and requirements.matched_by_file(package, version, args.blacklist): print('Skipping {} {} as it is matched by the blacklist.'.format(package, version)) else: print('Building {} {}.'.format(package, version)) try: wheel_file = builder(package, version) devpi_client.upload(wheel_file) except wheeler.BuildError as e: print(e)
import argparse import logging from devpi_builder import requirements, wheeler, devpi logging.basicConfig() logger = logging.getLogger(__name__) def main(args=None): parser = argparse.ArgumentParser(description='Create wheels for all given project versions and upload them to the given index.') parser.add_argument('requirements', help='requirements.txt style file specifying which project versions to package.') parser.add_argument('index', help='The index to upload the packaged software to.') parser.add_argument('user', help='The user to log in as.') parser.add_argument('password', help='Password of the user.') parser.add_argument('--blacklist', help='Packages matched by this requirements.txt style file will never be build.') args = parser.parse_args(args=args) with wheeler.Builder() as builder, devpi.Client(args.index, args.user, args.password) as devpi_client: for package, version in requirements.read(args.requirements): if devpi_client.package_version_exists(package, version): continue if args.blacklist and requirements.matched_by_file(package, version, args.blacklist): logger.info('Skipping %s %s as it is matched by the blacklist.', package, version) else: logger.info('Building %s %s', package, version) try: wheel_file = builder(package, version) devpi_client.upload(wheel_file) except wheeler.BuildError as e: logger.exception(e)
Use a logger instead of printing to stdout
Use a logger instead of printing to stdout
Python
bsd-3-clause
tylerdave/devpi-builder
import argparse + import logging from devpi_builder import requirements, wheeler, devpi + + logging.basicConfig() + logger = logging.getLogger(__name__) def main(args=None): parser = argparse.ArgumentParser(description='Create wheels for all given project versions and upload them to the given index.') parser.add_argument('requirements', help='requirements.txt style file specifying which project versions to package.') parser.add_argument('index', help='The index to upload the packaged software to.') parser.add_argument('user', help='The user to log in as.') parser.add_argument('password', help='Password of the user.') parser.add_argument('--blacklist', help='Packages matched by this requirements.txt style file will never be build.') args = parser.parse_args(args=args) with wheeler.Builder() as builder, devpi.Client(args.index, args.user, args.password) as devpi_client: for package, version in requirements.read(args.requirements): if devpi_client.package_version_exists(package, version): continue if args.blacklist and requirements.matched_by_file(package, version, args.blacklist): - print('Skipping {} {} as it is matched by the blacklist.'.format(package, version)) + logger.info('Skipping %s %s as it is matched by the blacklist.', package, version) else: - print('Building {} {}.'.format(package, version)) + logger.info('Building %s %s', package, version) try: wheel_file = builder(package, version) devpi_client.upload(wheel_file) except wheeler.BuildError as e: - print(e) + logger.exception(e)
Use a logger instead of printing to stdout
## Code Before: import argparse from devpi_builder import requirements, wheeler, devpi def main(args=None): parser = argparse.ArgumentParser(description='Create wheels for all given project versions and upload them to the given index.') parser.add_argument('requirements', help='requirements.txt style file specifying which project versions to package.') parser.add_argument('index', help='The index to upload the packaged software to.') parser.add_argument('user', help='The user to log in as.') parser.add_argument('password', help='Password of the user.') parser.add_argument('--blacklist', help='Packages matched by this requirements.txt style file will never be build.') args = parser.parse_args(args=args) with wheeler.Builder() as builder, devpi.Client(args.index, args.user, args.password) as devpi_client: for package, version in requirements.read(args.requirements): if devpi_client.package_version_exists(package, version): continue if args.blacklist and requirements.matched_by_file(package, version, args.blacklist): print('Skipping {} {} as it is matched by the blacklist.'.format(package, version)) else: print('Building {} {}.'.format(package, version)) try: wheel_file = builder(package, version) devpi_client.upload(wheel_file) except wheeler.BuildError as e: print(e) ## Instruction: Use a logger instead of printing to stdout ## Code After: import argparse import logging from devpi_builder import requirements, wheeler, devpi logging.basicConfig() logger = logging.getLogger(__name__) def main(args=None): parser = argparse.ArgumentParser(description='Create wheels for all given project versions and upload them to the given index.') parser.add_argument('requirements', help='requirements.txt style file specifying which project versions to package.') parser.add_argument('index', help='The index to upload the packaged software to.') parser.add_argument('user', help='The user to log in as.') parser.add_argument('password', help='Password of the user.') parser.add_argument('--blacklist', help='Packages matched by this requirements.txt style file will never be build.') args = parser.parse_args(args=args) with wheeler.Builder() as builder, devpi.Client(args.index, args.user, args.password) as devpi_client: for package, version in requirements.read(args.requirements): if devpi_client.package_version_exists(package, version): continue if args.blacklist and requirements.matched_by_file(package, version, args.blacklist): logger.info('Skipping %s %s as it is matched by the blacklist.', package, version) else: logger.info('Building %s %s', package, version) try: wheel_file = builder(package, version) devpi_client.upload(wheel_file) except wheeler.BuildError as e: logger.exception(e)
import argparse + import logging from devpi_builder import requirements, wheeler, devpi + + logging.basicConfig() + logger = logging.getLogger(__name__) def main(args=None): parser = argparse.ArgumentParser(description='Create wheels for all given project versions and upload them to the given index.') parser.add_argument('requirements', help='requirements.txt style file specifying which project versions to package.') parser.add_argument('index', help='The index to upload the packaged software to.') parser.add_argument('user', help='The user to log in as.') parser.add_argument('password', help='Password of the user.') parser.add_argument('--blacklist', help='Packages matched by this requirements.txt style file will never be build.') args = parser.parse_args(args=args) with wheeler.Builder() as builder, devpi.Client(args.index, args.user, args.password) as devpi_client: for package, version in requirements.read(args.requirements): if devpi_client.package_version_exists(package, version): continue if args.blacklist and requirements.matched_by_file(package, version, args.blacklist): - print('Skipping {} {} as it is matched by the blacklist.'.format(package, version)) ? ^ ^ ^^ ^^ ^^^^^^^^ - + logger.info('Skipping %s %s as it is matched by the blacklist.', package, version) ? ^^^^^ + ^^ ^^ ^^ ^^ else: - print('Building {} {}.'.format(package, version)) ? ^ ^ ^^ ^^^ ^^^^^^^^ - + logger.info('Building %s %s', package, version) ? ^^^^^ + ^^ ^^ ^^ ^^ try: wheel_file = builder(package, version) devpi_client.upload(wheel_file) except wheeler.BuildError as e: - print(e) ? ^ - + logger.exception(e) ? +++++++++++ ^ +
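Why this record passes `%s` placeholders and arguments to the logger instead of pre-formatted strings: the `logging` module defers interpolation until a handler actually emits the record, so messages below the active level cost almost nothing, and `logger.exception` attaches the full traceback. A runnable sketch with stand-in values:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

package, version = 'requests', '2.31.0'  # stand-in values
logger.info('Building %s %s', package, version)
try:
    raise RuntimeError('wheel build failed')  # stand-in for wheeler.BuildError
except RuntimeError as e:
    logger.exception(e)  # logs the message plus the traceback at ERROR level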
cfe78dabea226e24928d26183f4b135c52b64663
feder/cases/forms.py
feder/cases/forms.py
from atom.ext.crispy_forms.forms import SingleButtonMixin from braces.forms import UserKwargModelFormMixin from django import forms from .models import Case class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm): def __init__(self, *args, **kwargs): self.monitoring = kwargs.pop('monitoring', None) super(CaseForm, self).__init__(*args, **kwargs) def save(self, *args, **kwargs): if self.monitoring: self.instance.monitoring = self.monitoring super(CaseForm, self).save(*args, **kwargs) class Meta: model = Case fields = ['name', 'institution']
from atom.ext.crispy_forms.forms import SingleButtonMixin from braces.forms import UserKwargModelFormMixin from django import forms from .models import Case class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm): def __init__(self, *args, **kwargs): self.monitoring = kwargs.pop('monitoring', None) super(CaseForm, self).__init__(*args, **kwargs) if self.monitoring: self.instance.monitoring = self.monitoring class Meta: model = Case fields = ['name', 'institution']
Clean up form in CaseForm
Clean up form in CaseForm
Python
mit
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
from atom.ext.crispy_forms.forms import SingleButtonMixin from braces.forms import UserKwargModelFormMixin from django import forms from .models import Case class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm): def __init__(self, *args, **kwargs): self.monitoring = kwargs.pop('monitoring', None) super(CaseForm, self).__init__(*args, **kwargs) - - def save(self, *args, **kwargs): if self.monitoring: self.instance.monitoring = self.monitoring - super(CaseForm, self).save(*args, **kwargs) class Meta: model = Case fields = ['name', 'institution']
Clean up form in CaseForm
## Code Before: from atom.ext.crispy_forms.forms import SingleButtonMixin from braces.forms import UserKwargModelFormMixin from django import forms from .models import Case class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm): def __init__(self, *args, **kwargs): self.monitoring = kwargs.pop('monitoring', None) super(CaseForm, self).__init__(*args, **kwargs) def save(self, *args, **kwargs): if self.monitoring: self.instance.monitoring = self.monitoring super(CaseForm, self).save(*args, **kwargs) class Meta: model = Case fields = ['name', 'institution'] ## Instruction: Clean up form in CaseForm ## Code After: from atom.ext.crispy_forms.forms import SingleButtonMixin from braces.forms import UserKwargModelFormMixin from django import forms from .models import Case class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm): def __init__(self, *args, **kwargs): self.monitoring = kwargs.pop('monitoring', None) super(CaseForm, self).__init__(*args, **kwargs) if self.monitoring: self.instance.monitoring = self.monitoring class Meta: model = Case fields = ['name', 'institution']
from atom.ext.crispy_forms.forms import SingleButtonMixin from braces.forms import UserKwargModelFormMixin from django import forms from .models import Case class CaseForm(SingleButtonMixin, UserKwargModelFormMixin, forms.ModelForm): def __init__(self, *args, **kwargs): self.monitoring = kwargs.pop('monitoring', None) super(CaseForm, self).__init__(*args, **kwargs) - - def save(self, *args, **kwargs): if self.monitoring: self.instance.monitoring = self.monitoring - super(CaseForm, self).save(*args, **kwargs) class Meta: model = Case fields = ['name', 'institution']
acf2729f368ad4eabc0219d1a191089e8d5f740f
dmz/geolocate.py
dmz/geolocate.py
import pygeoip def geolocate(ip_addresses): #Read in files, storing in memory for speed ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1) ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1) #Check type if not(isinstance(ip_addresses,list)): ip_addresses = [ip_addresses] #Construct output list output = [] #For each entry in the input list, retrieve the country code and add it to the output object for entry in ip_addresses: if(bool(re.search(":",entry))): try: output.append(ip6_geo.country_code_by_addr(entry)) except: output.append("Invalid") else: try: output.append(ip4_geo.country_code_by_addr(entry)) except: output.append("Invalid") #Return return output
import pygeoip class GeoLocator(object): """Geo locate IP addresses using the MaxMind database""" def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat', ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'): self.ipv4_geo_path = ipv4_geo_path self.ipv6_geo_path = ipv6_geo_path @property def ipv4_geo(self): """Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data. The data is stored in memory, and loaded up only when first requested""" if not hasattr(self, '_ipv4_geo'): self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1) return self._ipv4_geo @property def ipv6_geo(self): """Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data. The data is stored in memory, and loaded up only when first requested""" if not hasattr(self, '_ipv6_geo'): self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1) return self._ipv6_geo def _check_if_ipv6(self, ip_address): """Return true if given ip_address is IPv6, false otherwise""" try: # socket.inet_pton throws an exception if it isn't a valid address # of the stated address class socket.inet_pton(socket.AF_INET6, ip_address) return True except: return False def find_country(self, ip_address): """Return best guess of country in which this IP address resides""" if self._check_if_ipv6(ip_address): return self.ipv6_geo.country_code_by_addr(ip_address) else: return self.ipv4_geo.country_code_by_addr(ip_address)
Move the Geo Location stuff into a class
Move the Geo Location stuff into a class
Python
mit
yuvipanda/edit-stats
import pygeoip + class GeoLocator(object): + """Geo locate IP addresses using the MaxMind database""" + def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat', + ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'): + self.ipv4_geo_path = ipv4_geo_path + self.ipv6_geo_path = ipv6_geo_path - def geolocate(ip_addresses): - - #Read in files, storing in memory for speed - ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1) - ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1) - - #Check type - if not(isinstance(ip_addresses,list)): - ip_addresses = [ip_addresses] - - #Construct output list - output = [] - - #For each entry in the input list, retrieve the country code and add it to the output object - for entry in ip_addresses: - - if(bool(re.search(":",entry))): - - try: - - output.append(ip6_geo.country_code_by_addr(entry)) - - except: - - output.append("Invalid") - - else: - - try: - - output.append(ip4_geo.country_code_by_addr(entry)) - - except: - - output.append("Invalid") - - #Return - return output + @property + def ipv4_geo(self): + """Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data. + + The data is stored in memory, and loaded up only when first requested""" + if not hasattr(self, '_ipv4_geo'): + self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1) + return self._ipv4_geo + + @property + def ipv6_geo(self): + """Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data. + + The data is stored in memory, and loaded up only when first requested""" + if not hasattr(self, '_ipv6_geo'): + self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1) + return self._ipv6_geo + + def _check_if_ipv6(self, ip_address): + """Return true if given ip_address is IPv6, false otherwise""" + try: + # socket.inet_pton throws an exception if it isn't a valid address + # of the stated address class + socket.inet_pton(socket.AF_INET6, ip_address) + return True + except: + return False + + def find_country(self, ip_address): + """Return best guess of country in which this IP address resides""" + if self._check_if_ipv6(ip_address): + return self.ipv6_geo.country_code_by_addr(ip_address) + else: + return self.ipv4_geo.country_code_by_addr(ip_address) +
Move the Geo Location stuff into a class
## Code Before: import pygeoip def geolocate(ip_addresses): #Read in files, storing in memory for speed ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1) ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1) #Check type if not(isinstance(ip_addresses,list)): ip_addresses = [ip_addresses] #Construct output list output = [] #For each entry in the input list, retrieve the country code and add it to the output object for entry in ip_addresses: if(bool(re.search(":",entry))): try: output.append(ip6_geo.country_code_by_addr(entry)) except: output.append("Invalid") else: try: output.append(ip4_geo.country_code_by_addr(entry)) except: output.append("Invalid") #Return return output ## Instruction: Move the Geo Location stuff into a class ## Code After: import pygeoip class GeoLocator(object): """Geo locate IP addresses using the MaxMind database""" def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat', ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'): self.ipv4_geo_path = ipv4_geo_path self.ipv6_geo_path = ipv6_geo_path @property def ipv4_geo(self): """Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data. The data is stored in memory, and loaded up only when first requested""" if not hasattr(self, '_ipv4_geo'): self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1) return self._ipv4_geo @property def ipv6_geo(self): """Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data. The data is stored in memory, and loaded up only when first requested""" if not hasattr(self, '_ipv6_geo'): self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1) return self._ipv6_geo def _check_if_ipv6(self, ip_address): """Return true if given ip_address is IPv6, false otherwise""" try: # socket.inet_pton throws an exception if it isn't a valid address # of the stated address class socket.inet_pton(socket.AF_INET6, ip_address) return True except: return False def find_country(self, ip_address): """Return best guess of country in which this IP address resides""" if self._check_if_ipv6(ip_address): return self.ipv6_geo.country_code_by_addr(ip_address) else: return self.ipv4_geo.country_code_by_addr(ip_address)
import pygeoip - def geolocate(ip_addresses): - - #Read in files, storing in memory for speed - ip4_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIP.dat", flags = 1) - ip6_geo = pygeoip.GeoIP(filename = "/usr/share/GeoIP/GeoIPv6.dat", flags = 1) - - #Check type - if not(isinstance(ip_addresses,list)): - ip_addresses = [ip_addresses] - - #Construct output list - output = [] - - #For each entry in the input list, retrieve the country code and add it to the output object - for entry in ip_addresses: - - if(bool(re.search(":",entry))): - + class GeoLocator(object): + """Geo locate IP addresses using the MaxMind database""" + def __init__(self, ipv4_geo_path='/usr/share/GeoIP/GeoIP.dat', + ipv6_geo_path='/usr/share/GeoIP/GeoIPv6.dat'): + self.ipv4_geo_path = ipv4_geo_path + self.ipv6_geo_path = ipv6_geo_path + + @property + def ipv4_geo(self): + """Return an instance of pygeoip.GeoIP loaded with IPv4 geolocation data. + + The data is stored in memory, and loaded up only when first requested""" + if not hasattr(self, '_ipv4_geo'): + self._ipv4_geo = pygeoip.GeoIP(filename=self.ipv4_geo_path, flags=1) + return self._ipv4_geo + + @property + def ipv6_geo(self): + """Return an instance of pygeoip.GeoIP loaded with IPv6 geolocation data. + + The data is stored in memory, and loaded up only when first requested""" + if not hasattr(self, '_ipv6_geo'): + self._ipv6_geo = pygeoip.GeoIP(filename=self.ipv6_geo_path, flags=1) + return self._ipv6_geo + + def _check_if_ipv6(self, ip_address): + """Return true if given ip_address is IPv6, false otherwise""" - try: + try: ? ++ - - output.append(ip6_geo.country_code_by_addr(entry)) - + # socket.inet_pton throws an exception if it isn't a valid address + # of the stated address class + socket.inet_pton(socket.AF_INET6, ip_address) + return True - except: + except: ? ++ - - output.append("Invalid") - + return False + + def find_country(self, ip_address): + """Return best guess of country in which this IP address resides""" + if self._check_if_ipv6(ip_address): + return self.ipv6_geo.country_code_by_addr(ip_address) - else: + else: ? ++++ + return self.ipv4_geo.country_code_by_addr(ip_address) - - try: - - output.append(ip4_geo.country_code_by_addr(entry)) - - except: - - output.append("Invalid") - - #Return - return output
ee0a0b492b5536e0cc8c8e561875254698416eb4
lib/ansible/utils/string_functions.py
lib/ansible/utils/string_functions.py
def isprintable(instring): #http://stackoverflow.com/a/3637294 import string printset = set(string.printable) isprintable = set(instring).issubset(printset) return isprintable def count_newlines_from_end(str): i = len(str) while i > 0: if str[i-1] != '\n': break i -= 1 return len(str) - i
def isprintable(instring): if isinstance(instring, str): #http://stackoverflow.com/a/3637294 import string printset = set(string.printable) isprintable = set(instring).issubset(printset) return isprintable else: return True def count_newlines_from_end(str): i = len(str) while i > 0: if str[i-1] != '\n': break i -= 1 return len(str) - i
Allow isprintable() util function to work with unicode
Allow isprintable() util function to work with unicode Fixes #6842
Python
mit
thaim/ansible,thaim/ansible
def isprintable(instring): + if isinstance(instring, str): - #http://stackoverflow.com/a/3637294 + #http://stackoverflow.com/a/3637294 - import string + import string - printset = set(string.printable) + printset = set(string.printable) - isprintable = set(instring).issubset(printset) + isprintable = set(instring).issubset(printset) - return isprintable + return isprintable + else: + return True def count_newlines_from_end(str): i = len(str) while i > 0: if str[i-1] != '\n': break i -= 1 return len(str) - i
Allow isprintable() util function to work with unicode
## Code Before: def isprintable(instring): #http://stackoverflow.com/a/3637294 import string printset = set(string.printable) isprintable = set(instring).issubset(printset) return isprintable def count_newlines_from_end(str): i = len(str) while i > 0: if str[i-1] != '\n': break i -= 1 return len(str) - i ## Instruction: Allow isprintable() util function to work with unicode ## Code After: def isprintable(instring): if isinstance(instring, str): #http://stackoverflow.com/a/3637294 import string printset = set(string.printable) isprintable = set(instring).issubset(printset) return isprintable else: return True def count_newlines_from_end(str): i = len(str) while i > 0: if str[i-1] != '\n': break i -= 1 return len(str) - i
def isprintable(instring): + if isinstance(instring, str): - #http://stackoverflow.com/a/3637294 + #http://stackoverflow.com/a/3637294 ? ++++ - import string + import string ? ++++ - printset = set(string.printable) + printset = set(string.printable) ? ++++ - isprintable = set(instring).issubset(printset) + isprintable = set(instring).issubset(printset) ? ++++ - return isprintable + return isprintable ? ++++ + else: + return True def count_newlines_from_end(str): i = len(str) while i > 0: if str[i-1] != '\n': break i -= 1 return len(str) - i
ddc03637b19059f6fb06d72dc380afaf4fba57c2
indra/tests/test_context.py
indra/tests/test_context.py
from indra.databases import context_client def test_get_protein_expression(): res = context_client.get_protein_expression('EGFR', 'BT20_BREAST') assert(res is not None) assert(res.get('EGFR') is not None) assert(res['EGFR'].get('BT20_BREAST') is not None) assert(res['EGFR']['BT20_BREAST'] > 1000) def test_get_mutations(): res = context_client.get_mutations('BRAF', 'A375_SKIN') assert(res is not None) assert(res.get('BRAF') is not None) assert(res['BRAF'].get('A375_SKIN') is not None) assert(res['BRAF']['A375_SKIN'] == 1.0) def test_send_request_bad(): res = context_client.send_request('xyz', None) assert(res is None)
from indra.databases import context_client def test_get_protein_expression(): res = context_client.get_protein_expression('EGFR', 'BT20_BREAST') assert(res is not None) assert(res.get('EGFR') is not None) assert(res['EGFR'].get('BT20_BREAST') is not None) assert(res['EGFR']['BT20_BREAST'] > 1000) def test_get_mutations(): res = context_client.get_mutations('BRAF', 'A375_SKIN') assert(res is not None) assert(res.get('BRAF') is not None) assert(res['BRAF'].get('A375_SKIN') is not None) assert(res['BRAF']['A375_SKIN'] == 1.0)
Remove deprecated context client test
Remove deprecated context client test
Python
bsd-2-clause
johnbachman/belpy,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,jmuhlich/indra,jmuhlich/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,jmuhlich/indra,bgyori/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,bgyori/indra
from indra.databases import context_client def test_get_protein_expression(): res = context_client.get_protein_expression('EGFR', 'BT20_BREAST') assert(res is not None) assert(res.get('EGFR') is not None) assert(res['EGFR'].get('BT20_BREAST') is not None) assert(res['EGFR']['BT20_BREAST'] > 1000) def test_get_mutations(): res = context_client.get_mutations('BRAF', 'A375_SKIN') assert(res is not None) assert(res.get('BRAF') is not None) assert(res['BRAF'].get('A375_SKIN') is not None) assert(res['BRAF']['A375_SKIN'] == 1.0) - def test_send_request_bad(): - res = context_client.send_request('xyz', None) - assert(res is None) -
Remove deprecated context client test
## Code Before: from indra.databases import context_client def test_get_protein_expression(): res = context_client.get_protein_expression('EGFR', 'BT20_BREAST') assert(res is not None) assert(res.get('EGFR') is not None) assert(res['EGFR'].get('BT20_BREAST') is not None) assert(res['EGFR']['BT20_BREAST'] > 1000) def test_get_mutations(): res = context_client.get_mutations('BRAF', 'A375_SKIN') assert(res is not None) assert(res.get('BRAF') is not None) assert(res['BRAF'].get('A375_SKIN') is not None) assert(res['BRAF']['A375_SKIN'] == 1.0) def test_send_request_bad(): res = context_client.send_request('xyz', None) assert(res is None) ## Instruction: Remove deprecated context client test ## Code After: from indra.databases import context_client def test_get_protein_expression(): res = context_client.get_protein_expression('EGFR', 'BT20_BREAST') assert(res is not None) assert(res.get('EGFR') is not None) assert(res['EGFR'].get('BT20_BREAST') is not None) assert(res['EGFR']['BT20_BREAST'] > 1000) def test_get_mutations(): res = context_client.get_mutations('BRAF', 'A375_SKIN') assert(res is not None) assert(res.get('BRAF') is not None) assert(res['BRAF'].get('A375_SKIN') is not None) assert(res['BRAF']['A375_SKIN'] == 1.0)
from indra.databases import context_client def test_get_protein_expression(): res = context_client.get_protein_expression('EGFR', 'BT20_BREAST') assert(res is not None) assert(res.get('EGFR') is not None) assert(res['EGFR'].get('BT20_BREAST') is not None) assert(res['EGFR']['BT20_BREAST'] > 1000) def test_get_mutations(): res = context_client.get_mutations('BRAF', 'A375_SKIN') assert(res is not None) assert(res.get('BRAF') is not None) assert(res['BRAF'].get('A375_SKIN') is not None) assert(res['BRAF']['A375_SKIN'] == 1.0) - - def test_send_request_bad(): - res = context_client.send_request('xyz', None) - assert(res is None)
63afb46b7a39881c3a3655af645d5414bdd730ea
edumed/forum.py
edumed/forum.py
from pybb.permissions import DefaultPermissionHandler class ForumPermissionHandler(DefaultPermissionHandler): def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return False
from pybb.permissions import DefaultPermissionHandler class ForumPermissionHandler(DefaultPermissionHandler): def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return False def may_create_topic(self, user, forum): """ return True if `user` is allowed to create a new topic in `forum` """ return user.is_authenticated() def may_create_post(self, user, topic): """ return True if `user` is allowed to create a new post in `topic` """ if topic.forum.hidden and (not user.is_staff): # if topic is hidden, only staff may post return False if topic.closed and (not user.is_staff): # if topic is closed, only staff may post return False return user.is_authenticated()
Allow for authenticated non super users to create posts and topics
Allow for authenticated non super users to create posts and topics
Python
agpl-3.0
fnp/edumed,fnp/edumed,fnp/edumed
from pybb.permissions import DefaultPermissionHandler class ForumPermissionHandler(DefaultPermissionHandler): def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return False + def may_create_topic(self, user, forum): + """ return True if `user` is allowed to create a new topic in `forum` """ + return user.is_authenticated() + + def may_create_post(self, user, topic): + """ return True if `user` is allowed to create a new post in `topic` """ + + if topic.forum.hidden and (not user.is_staff): + # if topic is hidden, only staff may post + return False + + if topic.closed and (not user.is_staff): + # if topic is closed, only staff may post + return False + + return user.is_authenticated() +
Allow for authenticated non super users to create posts and topics
## Code Before: from pybb.permissions import DefaultPermissionHandler class ForumPermissionHandler(DefaultPermissionHandler): def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return False ## Instruction: Allow for authenticated non super users to create posts and topics ## Code After: from pybb.permissions import DefaultPermissionHandler class ForumPermissionHandler(DefaultPermissionHandler): def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return False def may_create_topic(self, user, forum): """ return True if `user` is allowed to create a new topic in `forum` """ return user.is_authenticated() def may_create_post(self, user, topic): """ return True if `user` is allowed to create a new post in `topic` """ if topic.forum.hidden and (not user.is_staff): # if topic is hidden, only staff may post return False if topic.closed and (not user.is_staff): # if topic is closed, only staff may post return False return user.is_authenticated()
from pybb.permissions import DefaultPermissionHandler class ForumPermissionHandler(DefaultPermissionHandler): def may_post_as_admin(self, user): """ return True if `user` may post as admin """ return False + + def may_create_topic(self, user, forum): + """ return True if `user` is allowed to create a new topic in `forum` """ + return user.is_authenticated() + + def may_create_post(self, user, topic): + """ return True if `user` is allowed to create a new post in `topic` """ + + if topic.forum.hidden and (not user.is_staff): + # if topic is hidden, only staff may post + return False + + if topic.closed and (not user.is_staff): + # if topic is closed, only staff may post + return False + + return user.is_authenticated() +
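For the handler to take effect, pybb must be pointed at it in the Django settings. Assuming the pybbm-style PYBB_PERMISSION_HANDLER setting (the exact name should be checked against the installed pybb version's documentation), the wiring would look roughly like:

# settings.py -- sketch only; the setting name is pybbm's convention
PYBB_PERMISSION_HANDLER = 'edumed.forum.ForumPermissionHandler'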
8d8dd559252bc32388e224746f2ae8cdbdceaae4
masters/master.client.syzygy/master_win_official_cfg.py
masters/master.client.syzygy/master_win_official_cfg.py
from buildbot.scheduler import Scheduler from buildbot.changes.filter import ChangeFilter from master.factory import syzygy_factory def win(): return syzygy_factory.SyzygyFactory('src/syzygy', target_platform='win32') def _VersionFileFilter(change): """A change filter function that disregards all changes that don't touch src/syzygy/VERSION. Args: change: a buildbot Change object. """ return change.branch == 'trunk' and 'syzygy/VERSION' in change.files # # Official build scheduler for Syzygy # official_scheduler = Scheduler('syzygy_version', treeStableTimer=0, change_filter=ChangeFilter( filter_fn=_VersionFileFilter), builderNames=['Syzygy Official']) # # Windows official Release builder # official_factory = win().SyzygyFactory(official_release=True) official_builder = { 'name': 'Syzygy Official', 'factory': official_factory, 'schedulers': 'syzygy_version', 'auto_reboot': False, 'category': 'official', } def Update(config, active_master, c): c['schedulers'].append(official_scheduler) c['builders'].append(official_builder)
from buildbot.scheduler import Scheduler # This is due to buildbot 0.7.12 being used for the presubmit check. from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401 from master.factory import syzygy_factory def win(): return syzygy_factory.SyzygyFactory('src/syzygy', target_platform='win32') def _VersionFileFilter(change): """A change filter function that disregards all changes that don't touch src/syzygy/VERSION. Args: change: a buildbot Change object. """ return change.branch == 'trunk' and 'syzygy/VERSION' in change.files # # Official build scheduler for Syzygy # official_scheduler = Scheduler('syzygy_version', treeStableTimer=0, change_filter=ChangeFilter( filter_fn=_VersionFileFilter), builderNames=['Syzygy Official']) # # Windows official Release builder # official_factory = win().SyzygyFactory(official_release=True) official_builder = { 'name': 'Syzygy Official', 'factory': official_factory, 'schedulers': 'syzygy_version', 'auto_reboot': False, 'category': 'official', } def Update(config, active_master, c): c['schedulers'].append(official_scheduler) c['builders'].append(official_builder)
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x [email protected] BUG= TEST= Review URL: http://codereview.chromium.org/7631036 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@97254 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
from buildbot.scheduler import Scheduler + # This is due to buildbot 0.7.12 being used for the presubmit check. - from buildbot.changes.filter import ChangeFilter + from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401 from master.factory import syzygy_factory def win(): return syzygy_factory.SyzygyFactory('src/syzygy', target_platform='win32') def _VersionFileFilter(change): """A change filter function that disregards all changes that don't touch src/syzygy/VERSION. Args: change: a buildbot Change object. """ return change.branch == 'trunk' and 'syzygy/VERSION' in change.files # # Official build scheduler for Syzygy # official_scheduler = Scheduler('syzygy_version', treeStableTimer=0, change_filter=ChangeFilter( filter_fn=_VersionFileFilter), builderNames=['Syzygy Official']) # # Windows official Release builder # official_factory = win().SyzygyFactory(official_release=True) official_builder = { 'name': 'Syzygy Official', 'factory': official_factory, 'schedulers': 'syzygy_version', 'auto_reboot': False, 'category': 'official', } def Update(config, active_master, c): c['schedulers'].append(official_scheduler) c['builders'].append(official_builder)
Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x
## Code Before: from buildbot.scheduler import Scheduler from buildbot.changes.filter import ChangeFilter from master.factory import syzygy_factory def win(): return syzygy_factory.SyzygyFactory('src/syzygy', target_platform='win32') def _VersionFileFilter(change): """A change filter function that disregards all changes that don't touch src/syzygy/VERSION. Args: change: a buildbot Change object. """ return change.branch == 'trunk' and 'syzygy/VERSION' in change.files # # Official build scheduler for Syzygy # official_scheduler = Scheduler('syzygy_version', treeStableTimer=0, change_filter=ChangeFilter( filter_fn=_VersionFileFilter), builderNames=['Syzygy Official']) # # Windows official Release builder # official_factory = win().SyzygyFactory(official_release=True) official_builder = { 'name': 'Syzygy Official', 'factory': official_factory, 'schedulers': 'syzygy_version', 'auto_reboot': False, 'category': 'official', } def Update(config, active_master, c): c['schedulers'].append(official_scheduler) c['builders'].append(official_builder) ## Instruction: Fix pylint presubmit check, related to buildbot 0.8.x vs 0.7.x ## Code After: from buildbot.scheduler import Scheduler # This is due to buildbot 0.7.12 being used for the presubmit check. from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401 from master.factory import syzygy_factory def win(): return syzygy_factory.SyzygyFactory('src/syzygy', target_platform='win32') def _VersionFileFilter(change): """A change filter function that disregards all changes that don't touch src/syzygy/VERSION. Args: change: a buildbot Change object. """ return change.branch == 'trunk' and 'syzygy/VERSION' in change.files # # Official build scheduler for Syzygy # official_scheduler = Scheduler('syzygy_version', treeStableTimer=0, change_filter=ChangeFilter( filter_fn=_VersionFileFilter), builderNames=['Syzygy Official']) # # Windows official Release builder # official_factory = win().SyzygyFactory(official_release=True) official_builder = { 'name': 'Syzygy Official', 'factory': official_factory, 'schedulers': 'syzygy_version', 'auto_reboot': False, 'category': 'official', } def Update(config, active_master, c): c['schedulers'].append(official_scheduler) c['builders'].append(official_builder)
from buildbot.scheduler import Scheduler + # This is due to buildbot 0.7.12 being used for the presubmit check. - from buildbot.changes.filter import ChangeFilter + from buildbot.changes.filter import ChangeFilter # pylint: disable=E0611,F0401 ? +++++++++++++++++++++++++++++++ from master.factory import syzygy_factory def win(): return syzygy_factory.SyzygyFactory('src/syzygy', target_platform='win32') def _VersionFileFilter(change): """A change filter function that disregards all changes that don't touch src/syzygy/VERSION. Args: change: a buildbot Change object. """ return change.branch == 'trunk' and 'syzygy/VERSION' in change.files # # Official build scheduler for Syzygy # official_scheduler = Scheduler('syzygy_version', treeStableTimer=0, change_filter=ChangeFilter( filter_fn=_VersionFileFilter), builderNames=['Syzygy Official']) # # Windows official Release builder # official_factory = win().SyzygyFactory(official_release=True) official_builder = { 'name': 'Syzygy Official', 'factory': official_factory, 'schedulers': 'syzygy_version', 'auto_reboot': False, 'category': 'official', } def Update(config, active_master, c): c['schedulers'].append(official_scheduler) c['builders'].append(official_builder)
62febcd8d6fcefdf2db3f411807fcf96c91228b8
tests/example_app.py
tests/example_app.py
from __future__ import print_function, unicode_literals import time # http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def main(): print('hi, there!') time.sleep(0.010) print('let\'s get to know each other better...') name = raw_input("Please enter your name: ") print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name + bcolors.ENDC) print('It was a pleasure talking to you...') if __name__ == '__main__': main()
from __future__ import print_function, unicode_literals import sys, time PY3 = sys.version_info[0] >= 3 if PY3: raw_input = input # http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def main(): print('hi, there!') time.sleep(0.010) print('let\'s get to know each other better...') name = raw_input("Please enter your name: ") print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name + bcolors.ENDC) print('It was a pleasure talking to you...') if __name__ == '__main__': main()
Define raw_input as input under Python 3
Define raw_input as input under Python 3
Python
mit
finklabs/inquirer,finklabs/whaaaaat
from __future__ import print_function, unicode_literals - import time + import sys, time + + + PY3 = sys.version_info[0] >= 3 + + if PY3: + raw_input = input + # http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def main(): print('hi, there!') time.sleep(0.010) print('let\'s get to know each other better...') name = raw_input("Please enter your name: ") print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name + bcolors.ENDC) print('It was a pleasure talking to you...') if __name__ == '__main__': main()
Define raw_input as input under Python 3
## Code Before: from __future__ import print_function, unicode_literals import time # http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def main(): print('hi, there!') time.sleep(0.010) print('let\'s get to know each other better...') name = raw_input("Please enter your name: ") print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name + bcolors.ENDC) print('It was a pleasure talking to you...') if __name__ == '__main__': main() ## Instruction: Define raw_input as input under Python 3 ## Code After: from __future__ import print_function, unicode_literals import sys, time PY3 = sys.version_info[0] >= 3 if PY3: raw_input = input # http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def main(): print('hi, there!') time.sleep(0.010) print('let\'s get to know each other better...') name = raw_input("Please enter your name: ") print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name + bcolors.ENDC) print('It was a pleasure talking to you...') if __name__ == '__main__': main()
from __future__ import print_function, unicode_literals - import time + import sys, time ? +++++ + + + PY3 = sys.version_info[0] >= 3 + + if PY3: + raw_input = input + # http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python class bcolors: HEADER = '\033[95m' OKBLUE = '\033[94m' OKGREEN = '\033[92m' WARNING = '\033[93m' FAIL = '\033[91m' ENDC = '\033[0m' BOLD = '\033[1m' UNDERLINE = '\033[4m' def main(): print('hi, there!') time.sleep(0.010) print('let\'s get to know each other better...') name = raw_input("Please enter your name: ") print(bcolors.BOLD + bcolors.UNDERLINE + 'Hi %s, have a nice day!' % name + bcolors.ENDC) print('It was a pleasure talking to you...') if __name__ == '__main__': main()
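Aliasing raw_input = input behind a sys.version_info check is one common Python 2/3 shim; an equivalent version-agnostic variant probes for the name directly, which avoids consulting sys at all. A sketch:

try:
    raw_input          # exists on Python 2
except NameError:
    raw_input = input  # Python 3: input() has raw_input's old semantics

name = raw_input('Please enter your name: ')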
cf19d5a52237e6098dedc3c0bbfdaa3aedd180e0
loginza/models.py
loginza/models.py
from django.contrib.auth.models import User from django.db import models from django.utils import simplejson as json from .signals import post_associate class IdentityManager(models.Manager): def from_loginza_data(self, loginza_data): data = json.dumps(loginza_data) identity, created = self.get_or_create( identity = loginza_data['identity'], provider = loginza_data['provider'], defaults = {'data': data} ) if not created: identity.data = data identity.save() return identity class Identity(models.Model): identity = models.CharField(max_length=255) provider = models.CharField(max_length=255) user = models.ForeignKey(User, null=True) data = models.TextField() objects = IdentityManager() class Meta: unique_together = (('identity', 'provider'),) def associate(self, user): self.user = user self.save() post_associate.send(sender=type(self), instance=self) def create_user(self, username, email, password=None): existing_users = 0 while True: existing_users += 1 try: User.objects.get(username=username) except User.DoesNotExist: break username = '%s_%d' % (username, existing_users) user = User.objects.create_user(username, email, password) self.associate(user) return user
from django.contrib.auth.models import User from django.db import models from django.utils import simplejson as json from .signals import post_associate class IdentityManager(models.Manager): def from_loginza_data(self, loginza_data): data = json.dumps(loginza_data) identity, created = self.get_or_create( identity = loginza_data['identity'], provider = loginza_data['provider'], defaults = {'data': data} ) if not created: identity.data = data identity.save() return identity class Identity(models.Model): identity = models.CharField(max_length=255) provider = models.CharField(max_length=255) user = models.ForeignKey(User, null=True) data = models.TextField() objects = IdentityManager() class Meta: unique_together = (('identity', 'provider'),) def associate(self, user): self.user = user self.save() post_associate.send(sender=type(self), instance=self) def create_user(self, username, email, password=None): existing_users = 0 new_username = None while True: existing_users += 1 qs = User.objects.all() qs = qs.filter(username=new_username or username) if not qs.exists(): break new_username = '%s_%d' % (username, existing_users) user = User.objects.create_user(new_username or username, email, password) self.associate(user) return user
Fix user creation with unique username
Fix user creation with unique username
Python
isc
xobb1t/django-loginza-auth
from django.contrib.auth.models import User from django.db import models from django.utils import simplejson as json from .signals import post_associate class IdentityManager(models.Manager): def from_loginza_data(self, loginza_data): data = json.dumps(loginza_data) identity, created = self.get_or_create( identity = loginza_data['identity'], provider = loginza_data['provider'], defaults = {'data': data} ) if not created: identity.data = data identity.save() return identity class Identity(models.Model): identity = models.CharField(max_length=255) provider = models.CharField(max_length=255) user = models.ForeignKey(User, null=True) data = models.TextField() objects = IdentityManager() class Meta: unique_together = (('identity', 'provider'),) def associate(self, user): self.user = user self.save() post_associate.send(sender=type(self), instance=self) def create_user(self, username, email, password=None): existing_users = 0 + new_username = None while True: existing_users += 1 - try: - User.objects.get(username=username) - except User.DoesNotExist: + qs = User.objects.all() + qs = qs.filter(username=new_username or username) + if not qs.exists(): break - username = '%s_%d' % (username, existing_users) + new_username = '%s_%d' % (username, existing_users) - user = User.objects.create_user(username, email, password) + user = User.objects.create_user(new_username or username, email, password) self.associate(user) return user
Fix user creation with unique username
## Code Before: from django.contrib.auth.models import User from django.db import models from django.utils import simplejson as json from .signals import post_associate class IdentityManager(models.Manager): def from_loginza_data(self, loginza_data): data = json.dumps(loginza_data) identity, created = self.get_or_create( identity = loginza_data['identity'], provider = loginza_data['provider'], defaults = {'data': data} ) if not created: identity.data = data identity.save() return identity class Identity(models.Model): identity = models.CharField(max_length=255) provider = models.CharField(max_length=255) user = models.ForeignKey(User, null=True) data = models.TextField() objects = IdentityManager() class Meta: unique_together = (('identity', 'provider'),) def associate(self, user): self.user = user self.save() post_associate.send(sender=type(self), instance=self) def create_user(self, username, email, password=None): existing_users = 0 while True: existing_users += 1 try: User.objects.get(username=username) except User.DoesNotExist: break username = '%s_%d' % (username, existing_users) user = User.objects.create_user(username, email, password) self.associate(user) return user ## Instruction: Fix user creation with unique username ## Code After: from django.contrib.auth.models import User from django.db import models from django.utils import simplejson as json from .signals import post_associate class IdentityManager(models.Manager): def from_loginza_data(self, loginza_data): data = json.dumps(loginza_data) identity, created = self.get_or_create( identity = loginza_data['identity'], provider = loginza_data['provider'], defaults = {'data': data} ) if not created: identity.data = data identity.save() return identity class Identity(models.Model): identity = models.CharField(max_length=255) provider = models.CharField(max_length=255) user = models.ForeignKey(User, null=True) data = models.TextField() objects = IdentityManager() class Meta: unique_together = (('identity', 'provider'),) def associate(self, user): self.user = user self.save() post_associate.send(sender=type(self), instance=self) def create_user(self, username, email, password=None): existing_users = 0 new_username = None while True: existing_users += 1 qs = User.objects.all() qs = qs.filter(username=new_username or username) if not qs.exists(): break new_username = '%s_%d' % (username, existing_users) user = User.objects.create_user(new_username or username, email, password) self.associate(user) return user
from django.contrib.auth.models import User from django.db import models from django.utils import simplejson as json from .signals import post_associate class IdentityManager(models.Manager): def from_loginza_data(self, loginza_data): data = json.dumps(loginza_data) identity, created = self.get_or_create( identity = loginza_data['identity'], provider = loginza_data['provider'], defaults = {'data': data} ) if not created: identity.data = data identity.save() return identity class Identity(models.Model): identity = models.CharField(max_length=255) provider = models.CharField(max_length=255) user = models.ForeignKey(User, null=True) data = models.TextField() objects = IdentityManager() class Meta: unique_together = (('identity', 'provider'),) def associate(self, user): self.user = user self.save() post_associate.send(sender=type(self), instance=self) def create_user(self, username, email, password=None): existing_users = 0 + new_username = None while True: existing_users += 1 - try: - User.objects.get(username=username) - except User.DoesNotExist: + qs = User.objects.all() + qs = qs.filter(username=new_username or username) + if not qs.exists(): break - username = '%s_%d' % (username, existing_users) + new_username = '%s_%d' % (username, existing_users) ? ++++ - user = User.objects.create_user(username, email, password) + user = User.objects.create_user(new_username or username, email, password) ? ++++++++++++++++ self.associate(user) return user
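The while loop is a generic make-it-unique pattern: probe the candidate name and, on collision, retry with an increasing numeric suffix appended to the original base. Stripped of the ORM, the same idea with a caller-supplied exists predicate (both names here are illustrative):

def unique_name(base, exists):
    # Return base, or base_1, base_2, ... -- the first name for
    # which exists(name) is False.
    candidate = base
    suffix = 0
    while exists(candidate):
        suffix += 1
        candidate = '%s_%d' % (base, suffix)
    return candidate

taken = {'alice', 'alice_1'}
unique_name('alice', taken.__contains__)  # -> 'alice_2'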
ed279b7f2cfcfd4abdf1da36d8406a3f63603529
dss/mobile/__init__.py
dss/mobile/__init__.py
try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config from .handler import MediaHandler class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config from dss.storage import db from .handler import MediaHandler # If some streams are active, the program did no close properly. db.mobile.update({'active': True}, {'active': False}) class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
Mark all mobile streams as inactive when the program starts.
Mark all mobile streams as inactive when the program starts.
Python
bsd-3-clause
terabit-software/dynamic-stream-server,hmoraes/dynamic-stream-server,terabit-software/dynamic-stream-server,hmoraes/dynamic-stream-server,terabit-software/dynamic-stream-server,terabit-software/dynamic-stream-server,hmoraes/dynamic-stream-server,hmoraes/dynamic-stream-server
try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config + from dss.storage import db from .handler import MediaHandler + + + # If some streams are active, the program did no close properly. + db.mobile.update({'active': True}, {'active': False}) class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
Mark all mobile streams as inactive when the program starts.
## Code Before: try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config from .handler import MediaHandler class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown() ## Instruction: Mark all mobile streams as inactive when the program starts. ## Code After: try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config from dss.storage import db from .handler import MediaHandler # If some streams are active, the program did no close properly. db.mobile.update({'active': True}, {'active': False}) class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config + from dss.storage import db from .handler import MediaHandler + + + # If some streams are active, the program did no close properly. + db.mobile.update({'active': True}, {'active': False}) class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
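One caveat if db.mobile turns out to be a raw pymongo collection: in pymongo's classic update(), passing a plain document as the second argument replaces each matched document wholesale, and only the first match is touched by default. Resetting a single field across all matches is normally spelled with a $set modifier and multi=True. Whether that applies depends on what dss.storage.db wraps; the defensive spelling would be:

# Sketch assuming a raw pymongo collection; dss.storage.db may wrap
# this differently.
db.mobile.update({'active': True}, {'$set': {'active': False}}, multi=True)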
458fb9b764cae3419b6513dcc1fedf2ea8949829
networkx/generators/tests/test_stochastic.py
networkx/generators/tests/test_stochastic.py
from nose.tools import assert_true, assert_equal,assert_raises import networkx as nx def test_stochastic(): G=nx.DiGraph() G.add_edge(0,1) G.add_edge(0,2) S=nx.stochastic_graph(G) assert_true(nx.is_isomorphic(G,S)) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) S=nx.stochastic_graph(G,copy=True) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) def test_stochastic_error(): G=nx.Graph() assert_raises(Exception,nx.stochastic_graph,G) G=nx.MultiGraph() assert_raises(Exception,nx.stochastic_graph,G)
from nose.tools import assert_true, assert_equal,assert_raises import networkx as nx def test_stochastic(): G=nx.DiGraph() G.add_edge(0,1) G.add_edge(0,2) S=nx.stochastic_graph(G) assert_true(nx.is_isomorphic(G,S)) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) S=nx.stochastic_graph(G,copy=True) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) def test_stochastic_ints(): G=nx.DiGraph() G.add_edge(0,1,weight=1) G.add_edge(0,2,weight=1) S=nx.stochastic_graph(G) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) def test_stochastic_error(): G=nx.Graph() assert_raises(Exception,nx.stochastic_graph,G) G=nx.MultiGraph() assert_raises(Exception,nx.stochastic_graph,G)
Test stochastic graph with ints
Test stochastic graph with ints
Python
bsd-3-clause
blublud/networkx,dhimmel/networkx,nathania/networkx,goulu/networkx,ltiao/networkx,ionanrozenfeld/networkx,bzero/networkx,dmoliveira/networkx,aureooms/networkx,yashu-seth/networkx,ghdk/networkx,ionanrozenfeld/networkx,nathania/networkx,harlowja/networkx,michaelpacer/networkx,ionanrozenfeld/networkx,aureooms/networkx,harlowja/networkx,bzero/networkx,beni55/networkx,OrkoHunter/networkx,jni/networkx,kernc/networkx,NvanAdrichem/networkx,jcurbelo/networkx,sharifulgeo/networkx,dhimmel/networkx,jakevdp/networkx,jfinkels/networkx,farhaanbukhsh/networkx,nathania/networkx,debsankha/networkx,farhaanbukhsh/networkx,sharifulgeo/networkx,RMKD/networkx,Sixshaman/networkx,RMKD/networkx,aureooms/networkx,blublud/networkx,jakevdp/networkx,kernc/networkx,chrisnatali/networkx,jni/networkx,debsankha/networkx,harlowja/networkx,dmoliveira/networkx,tmilicic/networkx,kernc/networkx,bzero/networkx,wasade/networkx,jni/networkx,blublud/networkx,ghdk/networkx,jakevdp/networkx,sharifulgeo/networkx,JamesClough/networkx,chrisnatali/networkx,farhaanbukhsh/networkx,dmoliveira/networkx,cmtm/networkx,ghdk/networkx,dhimmel/networkx,chrisnatali/networkx,RMKD/networkx,andnovar/networkx,SanketDG/networkx,debsankha/networkx
from nose.tools import assert_true, assert_equal,assert_raises import networkx as nx def test_stochastic(): G=nx.DiGraph() G.add_edge(0,1) G.add_edge(0,2) S=nx.stochastic_graph(G) assert_true(nx.is_isomorphic(G,S)) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) S=nx.stochastic_graph(G,copy=True) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + def test_stochastic_ints(): + G=nx.DiGraph() + G.add_edge(0,1,weight=1) + G.add_edge(0,2,weight=1) + S=nx.stochastic_graph(G) + assert_equal(sorted(S.edges(data=True)), + [(0, 1, {'weight': 0.5}), + (0, 2, {'weight': 0.5})]) + + def test_stochastic_error(): G=nx.Graph() assert_raises(Exception,nx.stochastic_graph,G) G=nx.MultiGraph() assert_raises(Exception,nx.stochastic_graph,G)
Test stochastic graph with ints
## Code Before:
from nose.tools import assert_true, assert_equal,assert_raises
import networkx as nx

def test_stochastic():
    G=nx.DiGraph()
    G.add_edge(0,1)
    G.add_edge(0,2)
    S=nx.stochastic_graph(G)
    assert_true(nx.is_isomorphic(G,S))
    assert_equal(sorted(S.edges(data=True)),
                 [(0, 1, {'weight': 0.5}),
                  (0, 2, {'weight': 0.5})])
    S=nx.stochastic_graph(G,copy=True)
    assert_equal(sorted(S.edges(data=True)),
                 [(0, 1, {'weight': 0.5}),
                  (0, 2, {'weight': 0.5})])


def test_stochastic_error():
    G=nx.Graph()
    assert_raises(Exception,nx.stochastic_graph,G)
    G=nx.MultiGraph()
    assert_raises(Exception,nx.stochastic_graph,G)

## Instruction:
Test stochastic graph with ints

## Code After:
from nose.tools import assert_true, assert_equal,assert_raises
import networkx as nx

def test_stochastic():
    G=nx.DiGraph()
    G.add_edge(0,1)
    G.add_edge(0,2)
    S=nx.stochastic_graph(G)
    assert_true(nx.is_isomorphic(G,S))
    assert_equal(sorted(S.edges(data=True)),
                 [(0, 1, {'weight': 0.5}),
                  (0, 2, {'weight': 0.5})])
    S=nx.stochastic_graph(G,copy=True)
    assert_equal(sorted(S.edges(data=True)),
                 [(0, 1, {'weight': 0.5}),
                  (0, 2, {'weight': 0.5})])

def test_stochastic_ints():
    G=nx.DiGraph()
    G.add_edge(0,1,weight=1)
    G.add_edge(0,2,weight=1)
    S=nx.stochastic_graph(G)
    assert_equal(sorted(S.edges(data=True)),
                 [(0, 1, {'weight': 0.5}),
                  (0, 2, {'weight': 0.5})])


def test_stochastic_error():
    G=nx.Graph()
    assert_raises(Exception,nx.stochastic_graph,G)
    G=nx.MultiGraph()
    assert_raises(Exception,nx.stochastic_graph,G)
from nose.tools import assert_true, assert_equal,assert_raises import networkx as nx def test_stochastic(): G=nx.DiGraph() G.add_edge(0,1) G.add_edge(0,2) S=nx.stochastic_graph(G) assert_true(nx.is_isomorphic(G,S)) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) S=nx.stochastic_graph(G,copy=True) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) + def test_stochastic_ints(): + G=nx.DiGraph() + G.add_edge(0,1,weight=1) + G.add_edge(0,2,weight=1) + S=nx.stochastic_graph(G) + assert_equal(sorted(S.edges(data=True)), + [(0, 1, {'weight': 0.5}), + (0, 2, {'weight': 0.5})]) + + def test_stochastic_error(): G=nx.Graph() assert_raises(Exception,nx.stochastic_graph,G) G=nx.MultiGraph() assert_raises(Exception,nx.stochastic_graph,G)
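The new test pins down integer weights because stochastic_graph normalises each edge weight by the node's total out-weight; under Python 2, dividing two ints truncates, so 1 / 2 silently becomes 0 instead of 0.5 unless the implementation casts to float first. The arithmetic being exercised, in isolation:

degree = 1 + 1       # total out-weight of node 0, both weights ints
1 / degree           # 0 on Python 2, 0.5 on Python 3
float(1) / degree    # 0.5 everywhere -- what the assertions require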
1a0fe91b6ab9a90573b0f35d6ff81e7f0523acb4
praw/util/__init__.py
praw/util/__init__.py
"""Package imports for utilities.""" __all__ = ("cache",)
"""Package imports for utilities.""" import re __all__ = ("cache", "camel_to_snake", "snake_case_keys") _re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))") def camel_to_snake(name): """Convert `name` from camelCase to snake_case.""" return _re_camel_to_snake.sub(r"\1_", name).lower() def snake_case_keys(dictionary): """Return a new dictionary with keys converted to snake_case. :param dictionary: The dict to be corrected. """ return {camel_to_snake(k): v for k, v in dictionary.items()}
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
Python
bsd-2-clause
praw-dev/praw,gschizas/praw,leviroth/praw,praw-dev/praw,gschizas/praw,leviroth/praw
"""Package imports for utilities.""" - __all__ = ("cache",) + import re + __all__ = ("cache", "camel_to_snake", "snake_case_keys") + + _re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))") + + + def camel_to_snake(name): + """Convert `name` from camelCase to snake_case.""" + return _re_camel_to_snake.sub(r"\1_", name).lower() + + + def snake_case_keys(dictionary): + """Return a new dictionary with keys converted to snake_case. + + :param dictionary: The dict to be corrected. + + """ + return {camel_to_snake(k): v for k, v in dictionary.items()} +
Add `camel_to_snake()` and `snake_case_keys()` to praw.util
## Code Before: """Package imports for utilities.""" __all__ = ("cache",) ## Instruction: Add `camel_to_snake()` and `snake_case_keys()` to praw.util ## Code After: """Package imports for utilities.""" import re __all__ = ("cache", "camel_to_snake", "snake_case_keys") _re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))") def camel_to_snake(name): """Convert `name` from camelCase to snake_case.""" return _re_camel_to_snake.sub(r"\1_", name).lower() def snake_case_keys(dictionary): """Return a new dictionary with keys converted to snake_case. :param dictionary: The dict to be corrected. """ return {camel_to_snake(k): v for k, v in dictionary.items()}
"""Package imports for utilities.""" - __all__ = ("cache",) + import re + + __all__ = ("cache", "camel_to_snake", "snake_case_keys") + + _re_camel_to_snake = re.compile(r"([a-z0-9](?=[A-Z])|[A-Z](?=[A-Z][a-z]))") + + + def camel_to_snake(name): + """Convert `name` from camelCase to snake_case.""" + return _re_camel_to_snake.sub(r"\1_", name).lower() + + + def snake_case_keys(dictionary): + """Return a new dictionary with keys converted to snake_case. + + :param dictionary: The dict to be corrected. + + """ + return {camel_to_snake(k): v for k, v in dictionary.items()}
d13b3d89124d03f563c2ee2143ae16eec7d0b191
tests/Epsilon_tests/ImportTest.py
tests/Epsilon_tests/ImportTest.py
from unittest import TestCase, main from grammpy import EPS from grammpy import EPSILON class ImportTest(TestCase): def test_idSame(self): self.assertEqual(id(EPS), id(EPSILON)) def test_equal(self): self.assertEqual(EPS, EPSILON) def test_equalToSelf(self): self.assertEqual(EPS, EPS) def test_notEqualToNumber(self): self.assertNotEqual(EPS, 5) def test_notEqualToString(self): self.assertNotEqual(EPS, "asdf") def test_notEqualToObject(self): self.assertNotEqual(EPS, object()) if __name__ == '__main__': main()
from unittest import TestCase, main from grammpy import EPS from grammpy import EPSILON class ImportTest(TestCase): def test_idSame(self): self.assertEqual(id(EPS),id(EPSILON)) def test_equal(self): self.assertEqual(EPS, EPSILON) if __name__ == '__main__': main()
Revert "Add tests to compare epsilon with another objects"
Revert "Add tests to compare epsilon with another objects" This reverts commit ae4b4fe5fb5c5774720dd3a14549aa88bde91043.
Python
mit
PatrikValkovic/grammpy
from unittest import TestCase, main from grammpy import EPS from grammpy import EPSILON class ImportTest(TestCase): + def test_idSame(self): - self.assertEqual(id(EPS), id(EPSILON)) + self.assertEqual(id(EPS),id(EPSILON)) def test_equal(self): self.assertEqual(EPS, EPSILON) - def test_equalToSelf(self): - self.assertEqual(EPS, EPS) - - def test_notEqualToNumber(self): - self.assertNotEqual(EPS, 5) - - def test_notEqualToString(self): - self.assertNotEqual(EPS, "asdf") - - def test_notEqualToObject(self): - self.assertNotEqual(EPS, object()) - - if __name__ == '__main__': main()
Revert "Add tests to compare epsilon with another objects"
## Code Before: from unittest import TestCase, main from grammpy import EPS from grammpy import EPSILON class ImportTest(TestCase): def test_idSame(self): self.assertEqual(id(EPS), id(EPSILON)) def test_equal(self): self.assertEqual(EPS, EPSILON) def test_equalToSelf(self): self.assertEqual(EPS, EPS) def test_notEqualToNumber(self): self.assertNotEqual(EPS, 5) def test_notEqualToString(self): self.assertNotEqual(EPS, "asdf") def test_notEqualToObject(self): self.assertNotEqual(EPS, object()) if __name__ == '__main__': main() ## Instruction: Revert "Add tests to compare epsilon with another objects" ## Code After: from unittest import TestCase, main from grammpy import EPS from grammpy import EPSILON class ImportTest(TestCase): def test_idSame(self): self.assertEqual(id(EPS),id(EPSILON)) def test_equal(self): self.assertEqual(EPS, EPSILON) if __name__ == '__main__': main()
from unittest import TestCase, main from grammpy import EPS from grammpy import EPSILON class ImportTest(TestCase): + def test_idSame(self): - self.assertEqual(id(EPS), id(EPSILON)) ? - + self.assertEqual(id(EPS),id(EPSILON)) def test_equal(self): self.assertEqual(EPS, EPSILON) - def test_equalToSelf(self): - self.assertEqual(EPS, EPS) - - def test_notEqualToNumber(self): - self.assertNotEqual(EPS, 5) - - def test_notEqualToString(self): - self.assertNotEqual(EPS, "asdf") - - def test_notEqualToObject(self): - self.assertNotEqual(EPS, object()) - - if __name__ == '__main__': main()
f1e50c1caeeec5b8e443f634534bfed46f26dbdf
2017/async-socket-server/simple-client.py
2017/async-socket-server/simple-client.py
import sys, time import socket def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) sockobj.send(b'foo^1234$jo') sockobj.send(b'sdfsdfsdfsdf^a') sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') buf = b'' while True: buf += sockobj.recv(1024) print(buf) sockobj.close() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
import sys, time import socket import threading class ReadThread(threading.Thread): def __init__(self, sockobj): super().__init__() self.sockobj = sockobj self.bufsize = 8 * 1024 def run(self): while True: buf = self.sockobj.recv(self.bufsize) print('Received:', buf) if b'1111' in buf: break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) rthread = ReadThread(sockobj) rthread.start() sockobj.send(b'foo^1234$jo') time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') time.sleep(1.0) sockobj.close() rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
Modify client to read the socket concurrently
Modify client to read the socket concurrently
Python
unlicense
eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog
import sys, time import socket + import threading + + + class ReadThread(threading.Thread): + def __init__(self, sockobj): + super().__init__() + self.sockobj = sockobj + self.bufsize = 8 * 1024 + + def run(self): + while True: + buf = self.sockobj.recv(self.bufsize) + print('Received:', buf) + if b'1111' in buf: + break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) + rthread = ReadThread(sockobj) + rthread.start() sockobj.send(b'foo^1234$jo') + time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') + time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') + time.sleep(1.0) - - buf = b'' - while True: - buf += sockobj.recv(1024) - print(buf) sockobj.close() + rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
Modify client to read the socket concurrently
## Code Before: import sys, time import socket def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) sockobj.send(b'foo^1234$jo') sockobj.send(b'sdfsdfsdfsdf^a') sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') buf = b'' while True: buf += sockobj.recv(1024) print(buf) sockobj.close() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main() ## Instruction: Modify client to read the socket concurrently ## Code After: import sys, time import socket import threading class ReadThread(threading.Thread): def __init__(self, sockobj): super().__init__() self.sockobj = sockobj self.bufsize = 8 * 1024 def run(self): while True: buf = self.sockobj.recv(self.bufsize) print('Received:', buf) if b'1111' in buf: break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) rthread = ReadThread(sockobj) rthread.start() sockobj.send(b'foo^1234$jo') time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') time.sleep(1.0) sockobj.close() rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
import sys, time import socket + import threading + + + class ReadThread(threading.Thread): + def __init__(self, sockobj): + super().__init__() + self.sockobj = sockobj + self.bufsize = 8 * 1024 + + def run(self): + while True: + buf = self.sockobj.recv(self.bufsize) + print('Received:', buf) + if b'1111' in buf: + break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) + rthread = ReadThread(sockobj) + rthread.start() sockobj.send(b'foo^1234$jo') + time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') + time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') + time.sleep(1.0) - - buf = b'' - while True: - buf += sockobj.recv(1024) - print(buf) sockobj.close() + rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
93ed6f7db7060893214571bf5ec8a633fffa48ab
python/completers/cpp/clang_helpers.py
python/completers/cpp/clang_helpers.py
import os # Given an iterable object that produces strings (flags for Clang), removes the # '-c' and '-o' options that Clang does not like to see when it's producing # completions for a file. def PrepareClangFlags( flags, filename ): new_flags = [] skip = True for flag in flags: if skip: skip = False continue if flag == '-c': skip = True; continue if flag == '-o': skip = True; continue if flag == filename or os.path.realpath(flag) == filename: continue new_flags.append( flag ) return new_flags
import os # Given an iterable object that produces strings (flags for Clang), removes the # '-c' and '-o' options that Clang does not like to see when it's producing # completions for a file. def PrepareClangFlags( flags, filename ): new_flags = [] skip = True for flag in flags: if skip: skip = False continue if flag == '-c': continue if flag == '-o': skip = True; continue if flag == filename or os.path.realpath(flag) == filename: continue new_flags.append( flag ) return new_flags
Fix bug with removing flag after "-c"
Fix bug with removing flag after "-c" -c does not take an argument. Why did I think it did?
Python
mit
nikmartin/dotfiles
import os # Given an iterable object that produces strings (flags for Clang), removes the # '-c' and '-o' options that Clang does not like to see when it's producing # completions for a file. def PrepareClangFlags( flags, filename ): new_flags = [] skip = True for flag in flags: if skip: skip = False continue if flag == '-c': - skip = True; continue if flag == '-o': skip = True; continue if flag == filename or os.path.realpath(flag) == filename: continue new_flags.append( flag ) return new_flags
Fix bug with removing flag after "-c"
## Code Before: import os # Given an iterable object that produces strings (flags for Clang), removes the # '-c' and '-o' options that Clang does not like to see when it's producing # completions for a file. def PrepareClangFlags( flags, filename ): new_flags = [] skip = True for flag in flags: if skip: skip = False continue if flag == '-c': skip = True; continue if flag == '-o': skip = True; continue if flag == filename or os.path.realpath(flag) == filename: continue new_flags.append( flag ) return new_flags ## Instruction: Fix bug with removing flag after "-c" ## Code After: import os # Given an iterable object that produces strings (flags for Clang), removes the # '-c' and '-o' options that Clang does not like to see when it's producing # completions for a file. def PrepareClangFlags( flags, filename ): new_flags = [] skip = True for flag in flags: if skip: skip = False continue if flag == '-c': continue if flag == '-o': skip = True; continue if flag == filename or os.path.realpath(flag) == filename: continue new_flags.append( flag ) return new_flags
import os # Given an iterable object that produces strings (flags for Clang), removes the # '-c' and '-o' options that Clang does not like to see when it's producing # completions for a file. def PrepareClangFlags( flags, filename ): new_flags = [] skip = True for flag in flags: if skip: skip = False continue if flag == '-c': - skip = True; continue if flag == '-o': skip = True; continue if flag == filename or os.path.realpath(flag) == filename: continue new_flags.append( flag ) return new_flags
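Concretely, the corrected function drops the compiler invocation itself (the initial skip = True swallows the first element), the bare -c flag, -o together with its argument, and any mention of the source file, leaving only the flags useful for completion:

PrepareClangFlags(
    ['clang', '-c', '-o', 'out.o', '-Wall', '-I.', 'file.c'],
    'file.c')
# -> ['-Wall', '-I.']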
e6b5c93a8c23fcea84768a8b50708ef7ef78dcd8
functionaltests/api/base.py
functionaltests/api/base.py
from tempest import clients from tempest.common import rest_client from tempest import config import testtools CONF = config.CONF class SolumClient(rest_client.RestClient): def __init__(self, auth_provider): super(SolumClient, self).__init__(auth_provider) self.service = 'application_deployment' self.endpoint_url = 'publicURL' class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() username = CONF.identity.username password = CONF.identity.password tenant_name = CONF.identity.tenant_name mgr = clients.Manager(username, password, tenant_name) auth_provider = mgr.get_auth_provider() self.client = SolumClient(auth_provider)
from tempest import clients from tempest.common import rest_client from tempest import config import testtools CONF = config.CONF class SolumClient(rest_client.RestClient): def __init__(self, auth_provider): super(SolumClient, self).__init__(auth_provider) self.service = 'application_deployment' self.endpoint_url = 'publicURL' class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() username = CONF.identity.username password = CONF.identity.password tenant_name = CONF.identity.tenant_name mgr = clients.Manager(username, password, tenant_name) auth_provider = mgr.get_auth_provider(mgr.get_default_credentials()) self.client = SolumClient(auth_provider)
Fix functionaltests (imported tempest code has changed)
Fix functionaltests (imported tempest code has changed) get_auth_provider now takes an argument. Change-Id: I4a80ef3fdf2914854268459cf1080a46922e93d5
Python
apache-2.0
gilbertpilz/solum,ed-/solum,ed-/solum,gilbertpilz/solum,openstack/solum,devdattakulkarni/test-solum,gilbertpilz/solum,stackforge/solum,openstack/solum,ed-/solum,stackforge/solum,devdattakulkarni/test-solum,ed-/solum,gilbertpilz/solum
from tempest import clients from tempest.common import rest_client from tempest import config import testtools CONF = config.CONF class SolumClient(rest_client.RestClient): def __init__(self, auth_provider): super(SolumClient, self).__init__(auth_provider) self.service = 'application_deployment' self.endpoint_url = 'publicURL' class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() username = CONF.identity.username password = CONF.identity.password tenant_name = CONF.identity.tenant_name mgr = clients.Manager(username, password, tenant_name) - auth_provider = mgr.get_auth_provider() + auth_provider = mgr.get_auth_provider(mgr.get_default_credentials()) self.client = SolumClient(auth_provider)
Fix functionaltests (imported tempest code has changed)
## Code Before: from tempest import clients from tempest.common import rest_client from tempest import config import testtools CONF = config.CONF class SolumClient(rest_client.RestClient): def __init__(self, auth_provider): super(SolumClient, self).__init__(auth_provider) self.service = 'application_deployment' self.endpoint_url = 'publicURL' class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() username = CONF.identity.username password = CONF.identity.password tenant_name = CONF.identity.tenant_name mgr = clients.Manager(username, password, tenant_name) auth_provider = mgr.get_auth_provider() self.client = SolumClient(auth_provider) ## Instruction: Fix functionaltests (imported tempest code has changed) ## Code After: from tempest import clients from tempest.common import rest_client from tempest import config import testtools CONF = config.CONF class SolumClient(rest_client.RestClient): def __init__(self, auth_provider): super(SolumClient, self).__init__(auth_provider) self.service = 'application_deployment' self.endpoint_url = 'publicURL' class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() username = CONF.identity.username password = CONF.identity.password tenant_name = CONF.identity.tenant_name mgr = clients.Manager(username, password, tenant_name) auth_provider = mgr.get_auth_provider(mgr.get_default_credentials()) self.client = SolumClient(auth_provider)
from tempest import clients from tempest.common import rest_client from tempest import config import testtools CONF = config.CONF class SolumClient(rest_client.RestClient): def __init__(self, auth_provider): super(SolumClient, self).__init__(auth_provider) self.service = 'application_deployment' self.endpoint_url = 'publicURL' class TestCase(testtools.TestCase): def setUp(self): super(TestCase, self).setUp() username = CONF.identity.username password = CONF.identity.password tenant_name = CONF.identity.tenant_name mgr = clients.Manager(username, password, tenant_name) - auth_provider = mgr.get_auth_provider() + auth_provider = mgr.get_auth_provider(mgr.get_default_credentials()) ? ++++++++++++++++++++++++++++ + self.client = SolumClient(auth_provider)
0e044d1ad8b6fb2b0ac2126bb0fccfa05de9da14
file_transfer/datamover/__init__.py
file_transfer/datamover/__init__.py
from .connectors import (GithubConnector, S3Connector, FTPConnector, LocalConnector) from .transporters import (BaltradToS3, LocalToS3) from .s3enram import S3EnramHandler from .utils import (parse_filename, extract_month_updates, parse_coverage_month, coverage_to_csv)
from .connectors import (GithubConnector, S3Connector, FTPConnector, LocalConnector) from .transporters import (BaltradToS3, LocalToS3) from .s3enram import S3EnramHandler from .utils import (parse_filename, extract_month_updates, parse_coverage_month, coverage_to_csv, most_recent_to_csv)
Add csv handling to module
Add csv handling to module
Python
mit
enram/data-repository,enram/data-repository,enram/data-repository,enram/infrastructure,enram/data-repository,enram/infrastructure
from .connectors import (GithubConnector, S3Connector, FTPConnector, LocalConnector) from .transporters import (BaltradToS3, LocalToS3) from .s3enram import S3EnramHandler from .utils import (parse_filename, extract_month_updates, - parse_coverage_month, coverage_to_csv) + parse_coverage_month, coverage_to_csv, + most_recent_to_csv)
Add csv handling to module
## Code Before: from .connectors import (GithubConnector, S3Connector, FTPConnector, LocalConnector) from .transporters import (BaltradToS3, LocalToS3) from .s3enram import S3EnramHandler from .utils import (parse_filename, extract_month_updates, parse_coverage_month, coverage_to_csv) ## Instruction: Add csv handling to module ## Code After: from .connectors import (GithubConnector, S3Connector, FTPConnector, LocalConnector) from .transporters import (BaltradToS3, LocalToS3) from .s3enram import S3EnramHandler from .utils import (parse_filename, extract_month_updates, parse_coverage_month, coverage_to_csv, most_recent_to_csv)
from .connectors import (GithubConnector, S3Connector, FTPConnector, LocalConnector) from .transporters import (BaltradToS3, LocalToS3) from .s3enram import S3EnramHandler from .utils import (parse_filename, extract_month_updates, - parse_coverage_month, coverage_to_csv) ? ^ + parse_coverage_month, coverage_to_csv, ? ^ + most_recent_to_csv)
7ab7154c1393491bd2874484e02c6af6eb3bb7e7
tests/test_functional.py
tests/test_functional.py
from diffenator.diff import diff_fonts from itertools import permutations import collections from glob import glob import os import unittest class TestFunctionality(unittest.TestCase): def setUp(self): _path = os.path.dirname(__file__) font_paths = glob(os.path.join(_path, 'data', '*.ttf')) self.font_path_combos = permutations(font_paths, r=2) def test_diff(self): for font_a_path, font_b_path in self.font_path_combos: diff = diff_fonts(font_a_path, font_b_path) self.assertNotEqual(diff, collections.defaultdict(dict)) if __name__ == '__main__': unittest.main()
from diffenator.diff import diff_fonts from diffenator.font import InputFont from itertools import permutations import collections from glob import glob import os import unittest class TestFunctionality(unittest.TestCase): def setUp(self): _path = os.path.dirname(__file__) font_paths = glob(os.path.join(_path, 'data', '*.ttf')) self.font_path_combos = permutations(font_paths, r=2) def test_diff(self): for font_a_path, font_b_path in self.font_path_combos: font_a = InputFont(font_a_path) font_b = InputFont(font_b_path) diff = diff_fonts(font_a, font_b) self.assertNotEqual(diff, collections.defaultdict(dict)) if __name__ == '__main__': unittest.main()
Call diff_fonts with correct params
Call diff_fonts with correct params
Python
apache-2.0
googlefonts/fontdiffenator,googlefonts/fontdiffenator
from diffenator.diff import diff_fonts + from diffenator.font import InputFont from itertools import permutations import collections from glob import glob import os import unittest class TestFunctionality(unittest.TestCase): def setUp(self): _path = os.path.dirname(__file__) font_paths = glob(os.path.join(_path, 'data', '*.ttf')) self.font_path_combos = permutations(font_paths, r=2) def test_diff(self): for font_a_path, font_b_path in self.font_path_combos: + font_a = InputFont(font_a_path) + font_b = InputFont(font_b_path) - diff = diff_fonts(font_a_path, font_b_path) + diff = diff_fonts(font_a, font_b) self.assertNotEqual(diff, collections.defaultdict(dict)) if __name__ == '__main__': unittest.main()
Call diff_fonts with correct params
## Code Before: from diffenator.diff import diff_fonts from itertools import permutations import collections from glob import glob import os import unittest class TestFunctionality(unittest.TestCase): def setUp(self): _path = os.path.dirname(__file__) font_paths = glob(os.path.join(_path, 'data', '*.ttf')) self.font_path_combos = permutations(font_paths, r=2) def test_diff(self): for font_a_path, font_b_path in self.font_path_combos: diff = diff_fonts(font_a_path, font_b_path) self.assertNotEqual(diff, collections.defaultdict(dict)) if __name__ == '__main__': unittest.main() ## Instruction: Call diff_fonts with correct params ## Code After: from diffenator.diff import diff_fonts from diffenator.font import InputFont from itertools import permutations import collections from glob import glob import os import unittest class TestFunctionality(unittest.TestCase): def setUp(self): _path = os.path.dirname(__file__) font_paths = glob(os.path.join(_path, 'data', '*.ttf')) self.font_path_combos = permutations(font_paths, r=2) def test_diff(self): for font_a_path, font_b_path in self.font_path_combos: font_a = InputFont(font_a_path) font_b = InputFont(font_b_path) diff = diff_fonts(font_a, font_b) self.assertNotEqual(diff, collections.defaultdict(dict)) if __name__ == '__main__': unittest.main()
from diffenator.diff import diff_fonts + from diffenator.font import InputFont from itertools import permutations import collections from glob import glob import os import unittest class TestFunctionality(unittest.TestCase): def setUp(self): _path = os.path.dirname(__file__) font_paths = glob(os.path.join(_path, 'data', '*.ttf')) self.font_path_combos = permutations(font_paths, r=2) def test_diff(self): for font_a_path, font_b_path in self.font_path_combos: + font_a = InputFont(font_a_path) + font_b = InputFont(font_b_path) - diff = diff_fonts(font_a_path, font_b_path) ? ----- ----- + diff = diff_fonts(font_a, font_b) self.assertNotEqual(diff, collections.defaultdict(dict)) if __name__ == '__main__': unittest.main()
353ad2e4d03d5ad5a8c5a1e949e8cd3251c7d85b
holviapi/tests/test_api_idempotent.py
holviapi/tests/test_api_idempotent.py
import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice
import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice def test_get_invoice(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice i2 = invoiceapi.get_invoice(i.code) assert i.code == i2.code
Test getting invoice by code
Test getting invoice by code
Python
mit
rambo/python-holviapi,rambo/python-holviapi
import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice + def test_get_invoice(invoiceapi): + l = invoiceapi.list_invoices() + i = next(l) + assert type(i) == holviapi.Invoice + i2 = invoiceapi.get_invoice(i.code) + assert i.code == i2.code +
Test getting invoice by code
## Code Before: import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice ## Instruction: Test getting invoice by code ## Code After: import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice def test_get_invoice(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice i2 = invoiceapi.get_invoice(i.code) assert i.code == i2.code
import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice + + def test_get_invoice(invoiceapi): + l = invoiceapi.list_invoices() + i = next(l) + assert type(i) == holviapi.Invoice + i2 = invoiceapi.get_invoice(i.code) + assert i.code == i2.code
d6444cd75c8ce6babd373989abd3507dd14923bb
api.py
api.py
import json from tornado.httpserver import HTTPServer from tornado.web import URLSpec import tornado.ioloop import tornado.web from tornado.options import define, options from orders import Book, Buy, Sell define("port", default=3000, help="run on the given port", type=int) Book() class BookHandler(tornado.web.RequestHandler): def get(self): ret = json.dumps(Book().orders()) self.write(ret) class OrderHandler(tornado.web.RequestHandler): def post(self, **kwargs): order = None body = json.loads(self.request.body) if self.request.uri == "/buy": order = Buy(**body) if self.request.uri == "/sell": order = Sell(**body) fills = Book().match(order) self.write(json.dumps(fills)) def main(): tornado.options.parse_command_line() application = tornado.web.Application([ URLSpec(r"/book", BookHandler, name="book"), URLSpec(r"/buy", OrderHandler, name="buy"), URLSpec(r"/sell", OrderHandler, name="sell"), ]) http_server = HTTPServer(application) http_server.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
import json from tornado.httpserver import HTTPServer from tornado.httputil import HTTPHeaders from tornado.web import URLSpec import tornado.ioloop import tornado.web from tornado.options import define, options from orders import Book, Buy, Sell define("port", default=3000, help="run on the given port", type=int) Book() class BookHandler(tornado.web.RequestHandler): def get(self): ret = json.dumps(Book().orders()) self.write(ret) class OrderHandler(tornado.web.RequestHandler): def post(self, **kwargs): order = None body = json.loads(self.request.body) if self.request.uri == "/buy": order = Buy(**body) if self.request.uri == "/sell": order = Sell(**body) fills = Book().match(order) self.set_header("content-type", "application/json") self.set_header("location", "{}://{}{}".format(self.request.protocol, self.request.host, self.reverse_url("book"))) self.write(json.dumps(fills)) def main(): tornado.options.parse_command_line() application = tornado.web.Application([ URLSpec(r"/book", BookHandler, name="book"), URLSpec(r"/buy", OrderHandler, name="buy"), URLSpec(r"/sell", OrderHandler, name="sell"), ]) http_server = HTTPServer(application) http_server.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
Add location and content-type headers to response.
Add location and content-type headers to response.
Python
mit
eigenholser/ddme,eigenholser/ddme
import json from tornado.httpserver import HTTPServer + from tornado.httputil import HTTPHeaders from tornado.web import URLSpec import tornado.ioloop import tornado.web from tornado.options import define, options from orders import Book, Buy, Sell define("port", default=3000, help="run on the given port", type=int) Book() class BookHandler(tornado.web.RequestHandler): def get(self): ret = json.dumps(Book().orders()) self.write(ret) class OrderHandler(tornado.web.RequestHandler): def post(self, **kwargs): order = None body = json.loads(self.request.body) if self.request.uri == "/buy": order = Buy(**body) if self.request.uri == "/sell": order = Sell(**body) fills = Book().match(order) + self.set_header("content-type", "application/json") + self.set_header("location", "{}://{}{}".format(self.request.protocol, + self.request.host, self.reverse_url("book"))) self.write(json.dumps(fills)) def main(): tornado.options.parse_command_line() application = tornado.web.Application([ URLSpec(r"/book", BookHandler, name="book"), URLSpec(r"/buy", OrderHandler, name="buy"), URLSpec(r"/sell", OrderHandler, name="sell"), ]) http_server = HTTPServer(application) http_server.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
Add location and content-type headers to response.
## Code Before: import json from tornado.httpserver import HTTPServer from tornado.web import URLSpec import tornado.ioloop import tornado.web from tornado.options import define, options from orders import Book, Buy, Sell define("port", default=3000, help="run on the given port", type=int) Book() class BookHandler(tornado.web.RequestHandler): def get(self): ret = json.dumps(Book().orders()) self.write(ret) class OrderHandler(tornado.web.RequestHandler): def post(self, **kwargs): order = None body = json.loads(self.request.body) if self.request.uri == "/buy": order = Buy(**body) if self.request.uri == "/sell": order = Sell(**body) fills = Book().match(order) self.write(json.dumps(fills)) def main(): tornado.options.parse_command_line() application = tornado.web.Application([ URLSpec(r"/book", BookHandler, name="book"), URLSpec(r"/buy", OrderHandler, name="buy"), URLSpec(r"/sell", OrderHandler, name="sell"), ]) http_server = HTTPServer(application) http_server.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main() ## Instruction: Add location and content-type headers to response. ## Code After: import json from tornado.httpserver import HTTPServer from tornado.httputil import HTTPHeaders from tornado.web import URLSpec import tornado.ioloop import tornado.web from tornado.options import define, options from orders import Book, Buy, Sell define("port", default=3000, help="run on the given port", type=int) Book() class BookHandler(tornado.web.RequestHandler): def get(self): ret = json.dumps(Book().orders()) self.write(ret) class OrderHandler(tornado.web.RequestHandler): def post(self, **kwargs): order = None body = json.loads(self.request.body) if self.request.uri == "/buy": order = Buy(**body) if self.request.uri == "/sell": order = Sell(**body) fills = Book().match(order) self.set_header("content-type", "application/json") self.set_header("location", "{}://{}{}".format(self.request.protocol, self.request.host, self.reverse_url("book"))) self.write(json.dumps(fills)) def main(): tornado.options.parse_command_line() application = tornado.web.Application([ URLSpec(r"/book", BookHandler, name="book"), URLSpec(r"/buy", OrderHandler, name="buy"), URLSpec(r"/sell", OrderHandler, name="sell"), ]) http_server = HTTPServer(application) http_server.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
import json from tornado.httpserver import HTTPServer + from tornado.httputil import HTTPHeaders from tornado.web import URLSpec import tornado.ioloop import tornado.web from tornado.options import define, options from orders import Book, Buy, Sell define("port", default=3000, help="run on the given port", type=int) Book() class BookHandler(tornado.web.RequestHandler): def get(self): ret = json.dumps(Book().orders()) self.write(ret) class OrderHandler(tornado.web.RequestHandler): def post(self, **kwargs): order = None body = json.loads(self.request.body) if self.request.uri == "/buy": order = Buy(**body) if self.request.uri == "/sell": order = Sell(**body) fills = Book().match(order) + self.set_header("content-type", "application/json") + self.set_header("location", "{}://{}{}".format(self.request.protocol, + self.request.host, self.reverse_url("book"))) self.write(json.dumps(fills)) def main(): tornado.options.parse_command_line() application = tornado.web.Application([ URLSpec(r"/book", BookHandler, name="book"), URLSpec(r"/buy", OrderHandler, name="buy"), URLSpec(r"/sell", OrderHandler, name="sell"), ]) http_server = HTTPServer(application) http_server.listen(options.port) tornado.ioloop.IOLoop.current().start() if __name__ == "__main__": main()
9516115f722fb3f95882553d8077bf1ab4a670ef
examples/web_demo/exifutil.py
examples/web_demo/exifutil.py
from PIL import Image import numpy as np ORIENTATIONS = { # used in apply_orientation 2: (Image.FLIP_LEFT_RIGHT,), 3: (Image.ROTATE_180,), 4: (Image.FLIP_TOP_BOTTOM,), 5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90), 6: (Image.ROTATE_270,), 7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270), 8: (Image.ROTATE_90,) } def open_oriented_im(im_path): im = Image.open(im_path) if hasattr(im, '_getexif'): exif = im._getexif() if exif is not None and 274 in exif: orientation = exif[274] im = apply_orientation(im, orientation) return np.asarray(im).astype(np.float32) / 255. def apply_orientation(im, orientation): if orientation in ORIENTATIONS: for method in ORIENTATIONS[orientation]: im = im.transpose(method) return im
from PIL import Image import numpy as np ORIENTATIONS = { # used in apply_orientation 2: (Image.FLIP_LEFT_RIGHT,), 3: (Image.ROTATE_180,), 4: (Image.FLIP_TOP_BOTTOM,), 5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90), 6: (Image.ROTATE_270,), 7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270), 8: (Image.ROTATE_90,) } def open_oriented_im(im_path): im = Image.open(im_path) if hasattr(im, '_getexif'): exif = im._getexif() if exif is not None and 274 in exif: orientation = exif[274] im = apply_orientation(im, orientation) img = np.asarray(im).astype(np.float32) / 255. if img.ndim == 2: img = img[:, :, np.newaxis] img = np.tile(img, (1, 1, 3)) elif img.shape[2] == 4: img = img[:, :, :3] return img def apply_orientation(im, orientation): if orientation in ORIENTATIONS: for method in ORIENTATIONS[orientation]: im = im.transpose(method) return im
FIX web_demo upload was not processing grayscale correctly
FIX web_demo upload was not processing grayscale correctly
Python
bsd-2-clause
wangg12/caffe,longjon/caffe,gnina/gnina,gogartom/caffe-textmaps,wangg12/caffe,tackgeun/caffe,longjon/caffe,wangg12/caffe,tackgeun/caffe,tackgeun/caffe,gogartom/caffe-textmaps,gnina/gnina,tackgeun/caffe,gnina/gnina,CZCV/s-dilation-caffe,gnina/gnina,wangg12/caffe,longjon/caffe,gnina/gnina,gogartom/caffe-textmaps,CZCV/s-dilation-caffe,longjon/caffe,CZCV/s-dilation-caffe,CZCV/s-dilation-caffe,gnina/gnina,gogartom/caffe-textmaps
from PIL import Image import numpy as np ORIENTATIONS = { # used in apply_orientation 2: (Image.FLIP_LEFT_RIGHT,), 3: (Image.ROTATE_180,), 4: (Image.FLIP_TOP_BOTTOM,), 5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90), 6: (Image.ROTATE_270,), 7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270), 8: (Image.ROTATE_90,) } def open_oriented_im(im_path): im = Image.open(im_path) if hasattr(im, '_getexif'): exif = im._getexif() if exif is not None and 274 in exif: orientation = exif[274] im = apply_orientation(im, orientation) - return np.asarray(im).astype(np.float32) / 255. + img = np.asarray(im).astype(np.float32) / 255. + if img.ndim == 2: + img = img[:, :, np.newaxis] + img = np.tile(img, (1, 1, 3)) + elif img.shape[2] == 4: + img = img[:, :, :3] + return img def apply_orientation(im, orientation): if orientation in ORIENTATIONS: for method in ORIENTATIONS[orientation]: im = im.transpose(method) return im
FIX web_demo upload was not processing grayscale correctly
## Code Before: from PIL import Image import numpy as np ORIENTATIONS = { # used in apply_orientation 2: (Image.FLIP_LEFT_RIGHT,), 3: (Image.ROTATE_180,), 4: (Image.FLIP_TOP_BOTTOM,), 5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90), 6: (Image.ROTATE_270,), 7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270), 8: (Image.ROTATE_90,) } def open_oriented_im(im_path): im = Image.open(im_path) if hasattr(im, '_getexif'): exif = im._getexif() if exif is not None and 274 in exif: orientation = exif[274] im = apply_orientation(im, orientation) return np.asarray(im).astype(np.float32) / 255. def apply_orientation(im, orientation): if orientation in ORIENTATIONS: for method in ORIENTATIONS[orientation]: im = im.transpose(method) return im ## Instruction: FIX web_demo upload was not processing grayscale correctly ## Code After: from PIL import Image import numpy as np ORIENTATIONS = { # used in apply_orientation 2: (Image.FLIP_LEFT_RIGHT,), 3: (Image.ROTATE_180,), 4: (Image.FLIP_TOP_BOTTOM,), 5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90), 6: (Image.ROTATE_270,), 7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270), 8: (Image.ROTATE_90,) } def open_oriented_im(im_path): im = Image.open(im_path) if hasattr(im, '_getexif'): exif = im._getexif() if exif is not None and 274 in exif: orientation = exif[274] im = apply_orientation(im, orientation) img = np.asarray(im).astype(np.float32) / 255. if img.ndim == 2: img = img[:, :, np.newaxis] img = np.tile(img, (1, 1, 3)) elif img.shape[2] == 4: img = img[:, :, :3] return img def apply_orientation(im, orientation): if orientation in ORIENTATIONS: for method in ORIENTATIONS[orientation]: im = im.transpose(method) return im
from PIL import Image import numpy as np ORIENTATIONS = { # used in apply_orientation 2: (Image.FLIP_LEFT_RIGHT,), 3: (Image.ROTATE_180,), 4: (Image.FLIP_TOP_BOTTOM,), 5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90), 6: (Image.ROTATE_270,), 7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270), 8: (Image.ROTATE_90,) } def open_oriented_im(im_path): im = Image.open(im_path) if hasattr(im, '_getexif'): exif = im._getexif() if exif is not None and 274 in exif: orientation = exif[274] im = apply_orientation(im, orientation) - return np.asarray(im).astype(np.float32) / 255. ? ^^^^^^ + img = np.asarray(im).astype(np.float32) / 255. ? ^^^^^ + if img.ndim == 2: + img = img[:, :, np.newaxis] + img = np.tile(img, (1, 1, 3)) + elif img.shape[2] == 4: + img = img[:, :, :3] + return img def apply_orientation(im, orientation): if orientation in ORIENTATIONS: for method in ORIENTATIONS[orientation]: im = im.transpose(method) return im
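The fix above normalizes whatever PIL returns to an H x W x 3 float array: a 2-D grayscale image gains a channel axis and is tiled to three channels, and an RGBA image has its alpha channel sliced off. A standalone numpy sketch of that normalization, with illustrative input shapes:

import numpy as np

# Standalone sketch of the channel normalization; input shapes are illustrative.
def to_rgb(img):
    if img.ndim == 2:                     # grayscale, H x W
        img = img[:, :, np.newaxis]       # -> H x W x 1
        img = np.tile(img, (1, 1, 3))     # -> H x W x 3, channel duplicated
    elif img.shape[2] == 4:               # RGBA
        img = img[:, :, :3]               # drop the alpha channel
    return img

print(to_rgb(np.zeros((4, 4))).shape)     # (4, 4, 3)
print(to_rgb(np.zeros((4, 4, 4))).shape)  # (4, 4, 3)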
56a89d57824d3bd25ac235a8e360d528edd9a7cf
test/factories/blogpost_factory.py
test/factories/blogpost_factory.py
from pybossa.model import db from pybossa.model.blogpost import Blogpost from . import BaseFactory, factory class BlogpostFactory(BaseFactory): FACTORY_FOR = Blogpost id = factory.Sequence(lambda n: n) title = u'Blogpost title' body = u'Blogpost body text' app = factory.SubFactory('factories.AppFactory') app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id) owner = factory.SelfAttribute('app.owner') user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id)
from pybossa.model import db from pybossa.model.blogpost import Blogpost from . import BaseFactory, factory class BlogpostFactory(BaseFactory): FACTORY_FOR = Blogpost id = factory.Sequence(lambda n: n) title = u'Blogpost title' body = u'Blogpost body text' app = factory.SubFactory('factories.AppFactory') app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id) owner = factory.SelfAttribute('app.owner') user_id = factory.LazyAttribute( lambda blogpost: blogpost.owner.id if blogpost.owner else None)
Fix for nullable author in blogpost factory
Fix for nullable author in blogpost factory
Python
agpl-3.0
OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,proyectos-analizo-info/pybossa-analizo-info,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,harihpr/tweetclickers,stefanhahmann/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,PyBossa/pybossa,Scifabric/pybossa,geotagx/pybossa,PyBossa/pybossa
from pybossa.model import db from pybossa.model.blogpost import Blogpost from . import BaseFactory, factory class BlogpostFactory(BaseFactory): FACTORY_FOR = Blogpost id = factory.Sequence(lambda n: n) title = u'Blogpost title' body = u'Blogpost body text' app = factory.SubFactory('factories.AppFactory') app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id) owner = factory.SelfAttribute('app.owner') - user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id) + user_id = factory.LazyAttribute( + lambda blogpost: blogpost.owner.id if blogpost.owner else None)
Fix for nullable author in blogpost factory
## Code Before: from pybossa.model import db from pybossa.model.blogpost import Blogpost from . import BaseFactory, factory class BlogpostFactory(BaseFactory): FACTORY_FOR = Blogpost id = factory.Sequence(lambda n: n) title = u'Blogpost title' body = u'Blogpost body text' app = factory.SubFactory('factories.AppFactory') app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id) owner = factory.SelfAttribute('app.owner') user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id) ## Instruction: Fix for nullable author in blogpost factory ## Code After: from pybossa.model import db from pybossa.model.blogpost import Blogpost from . import BaseFactory, factory class BlogpostFactory(BaseFactory): FACTORY_FOR = Blogpost id = factory.Sequence(lambda n: n) title = u'Blogpost title' body = u'Blogpost body text' app = factory.SubFactory('factories.AppFactory') app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id) owner = factory.SelfAttribute('app.owner') user_id = factory.LazyAttribute( lambda blogpost: blogpost.owner.id if blogpost.owner else None)
from pybossa.model import db from pybossa.model.blogpost import Blogpost from . import BaseFactory, factory class BlogpostFactory(BaseFactory): FACTORY_FOR = Blogpost id = factory.Sequence(lambda n: n) title = u'Blogpost title' body = u'Blogpost body text' app = factory.SubFactory('factories.AppFactory') app_id = factory.LazyAttribute(lambda blogpost: blogpost.app.id) owner = factory.SelfAttribute('app.owner') - user_id = factory.LazyAttribute(lambda blogpost: blogpost.owner.id) + user_id = factory.LazyAttribute( + lambda blogpost: blogpost.owner.id if blogpost.owner else None)
aee8d2911c3f19a9b748f21ae82592d823e0c57e
update.py
update.py
import os, subprocess os.chdir(os.path.dirname(os.path.abspath(__file__))) subprocess.call([ 'python', os.path.join('..', 'venus', 'planet.py'), 'planet.ini' ]) subprocess.call([ 'python', 'aws', 's3', 'sync', '--region', 'us-east-1', 'public/', 's3://tempura.8-p.info/' ])
import os, subprocess os.chdir(os.path.dirname(os.path.abspath(__file__))) subprocess.call([ 'python', os.path.join('..', 'venus', 'planet.py'), 'planet.ini' ]) subprocess.call([ 'python', 'aws', 's3', 'sync', '--region', 'us-east-1', '--acl', 'public-read', 'public/', 's3://tempura.8-p.info/' ])
Set ACL explicitly to make files readable
Set ACL explicitly to make files readable
Python
mit
kzys/planet-tempura
import os, subprocess os.chdir(os.path.dirname(os.path.abspath(__file__))) subprocess.call([ 'python', os.path.join('..', 'venus', 'planet.py'), 'planet.ini' ]) subprocess.call([ 'python', 'aws', 's3', 'sync', '--region', 'us-east-1', + '--acl', 'public-read', 'public/', 's3://tempura.8-p.info/' ])
Set ACL explicitly to make files readable
## Code Before: import os, subprocess os.chdir(os.path.dirname(os.path.abspath(__file__))) subprocess.call([ 'python', os.path.join('..', 'venus', 'planet.py'), 'planet.ini' ]) subprocess.call([ 'python', 'aws', 's3', 'sync', '--region', 'us-east-1', 'public/', 's3://tempura.8-p.info/' ]) ## Instruction: Set ACL explicitly to make files readable ## Code After: import os, subprocess os.chdir(os.path.dirname(os.path.abspath(__file__))) subprocess.call([ 'python', os.path.join('..', 'venus', 'planet.py'), 'planet.ini' ]) subprocess.call([ 'python', 'aws', 's3', 'sync', '--region', 'us-east-1', '--acl', 'public-read', 'public/', 's3://tempura.8-p.info/' ])
import os, subprocess os.chdir(os.path.dirname(os.path.abspath(__file__))) subprocess.call([ 'python', os.path.join('..', 'venus', 'planet.py'), 'planet.ini' ]) subprocess.call([ 'python', 'aws', 's3', 'sync', '--region', 'us-east-1', + '--acl', 'public-read', 'public/', 's3://tempura.8-p.info/' ])
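The script shells out to the AWS CLI, so the new '--acl public-read' flag makes each synced object world-readable. For reference, a hedged sketch of the same effect through boto3's upload helper; the local path and object key here are assumed placeholders, only the bucket name comes from the script above:

import boto3

# Sketch only: file path and key are hypothetical, ACL mirrors --acl public-read.
s3 = boto3.client('s3', region_name='us-east-1')
s3.upload_file(
    'public/index.html',              # a file produced under public/
    'tempura.8-p.info',               # target bucket from the script
    'index.html',                     # object key
    ExtraArgs={'ACL': 'public-read'}  # counterpart of --acl public-read
)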
90b9a1e6638fd638450b46c6b12439eeb8e40f90
cumulusci/tasks/github/tests/test_pull_request.py
cumulusci/tasks/github/tests/test_pull_request.py
import mock import unittest from cumulusci.core.config import ServiceConfig from cumulusci.core.config import TaskConfig from cumulusci.tasks.github import PullRequests from cumulusci.tests.util import create_project_config @mock.patch("cumulusci.tasks.github.base.get_github_api_for_user", mock.Mock()) class TestPullRequests(unittest.TestCase): project_config = create_project_config() project_config.keychain.set_service( "github", ServiceConfig( { "username": "TestUser", "password": "TestPass", "email": "[email protected]", } ), ) task_config = TaskConfig() task = PullRequests(project_config, task_config) repo = mock.Mock() repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] task.get_repo = mock.Mock(return_value=repo) task.logger = mock.Mock() task() task.logger.info.assert_called_with("#1: Test PR")
import mock import unittest from cumulusci.core.config import ServiceConfig from cumulusci.core.config import TaskConfig from cumulusci.tasks.github import PullRequests from cumulusci.tests.util import create_project_config class TestPullRequests(unittest.TestCase): def test_run_task(self): project_config = create_project_config() project_config.keychain.set_service( "github", ServiceConfig( { "username": "TestUser", "password": "TestPass", "email": "[email protected]", } ), ) task_config = TaskConfig() task = PullRequests(project_config, task_config) repo = mock.Mock() repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] task.get_repo = mock.Mock(return_value=repo) task.logger = mock.Mock() task() task.logger.info.assert_called_with("#1: Test PR")
Fix test that wasn't running
Fix test that wasn't running
Python
bsd-3-clause
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
import mock import unittest from cumulusci.core.config import ServiceConfig from cumulusci.core.config import TaskConfig from cumulusci.tasks.github import PullRequests from cumulusci.tests.util import create_project_config - @mock.patch("cumulusci.tasks.github.base.get_github_api_for_user", mock.Mock()) class TestPullRequests(unittest.TestCase): + def test_run_task(self): - project_config = create_project_config() + project_config = create_project_config() - project_config.keychain.set_service( + project_config.keychain.set_service( - "github", + "github", - ServiceConfig( + ServiceConfig( - { + { - "username": "TestUser", + "username": "TestUser", - "password": "TestPass", + "password": "TestPass", - "email": "[email protected]", + "email": "[email protected]", + } - } + ), - ), + ) - ) - task_config = TaskConfig() + task_config = TaskConfig() - task = PullRequests(project_config, task_config) + task = PullRequests(project_config, task_config) - repo = mock.Mock() + repo = mock.Mock() - repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] + repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] - task.get_repo = mock.Mock(return_value=repo) + task.get_repo = mock.Mock(return_value=repo) - task.logger = mock.Mock() + task.logger = mock.Mock() - task() + task() - task.logger.info.assert_called_with("#1: Test PR") + task.logger.info.assert_called_with("#1: Test PR")
Fix test that wasn't running
## Code Before: import mock import unittest from cumulusci.core.config import ServiceConfig from cumulusci.core.config import TaskConfig from cumulusci.tasks.github import PullRequests from cumulusci.tests.util import create_project_config @mock.patch("cumulusci.tasks.github.base.get_github_api_for_user", mock.Mock()) class TestPullRequests(unittest.TestCase): project_config = create_project_config() project_config.keychain.set_service( "github", ServiceConfig( { "username": "TestUser", "password": "TestPass", "email": "[email protected]", } ), ) task_config = TaskConfig() task = PullRequests(project_config, task_config) repo = mock.Mock() repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] task.get_repo = mock.Mock(return_value=repo) task.logger = mock.Mock() task() task.logger.info.assert_called_with("#1: Test PR") ## Instruction: Fix test that wasn't running ## Code After: import mock import unittest from cumulusci.core.config import ServiceConfig from cumulusci.core.config import TaskConfig from cumulusci.tasks.github import PullRequests from cumulusci.tests.util import create_project_config class TestPullRequests(unittest.TestCase): def test_run_task(self): project_config = create_project_config() project_config.keychain.set_service( "github", ServiceConfig( { "username": "TestUser", "password": "TestPass", "email": "[email protected]", } ), ) task_config = TaskConfig() task = PullRequests(project_config, task_config) repo = mock.Mock() repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] task.get_repo = mock.Mock(return_value=repo) task.logger = mock.Mock() task() task.logger.info.assert_called_with("#1: Test PR")
import mock import unittest from cumulusci.core.config import ServiceConfig from cumulusci.core.config import TaskConfig from cumulusci.tasks.github import PullRequests from cumulusci.tests.util import create_project_config - @mock.patch("cumulusci.tasks.github.base.get_github_api_for_user", mock.Mock()) class TestPullRequests(unittest.TestCase): + def test_run_task(self): - project_config = create_project_config() + project_config = create_project_config() ? ++++ - project_config.keychain.set_service( + project_config.keychain.set_service( ? ++++ - "github", + "github", ? ++++ - ServiceConfig( + ServiceConfig( ? ++++ - { + { ? ++++ - "username": "TestUser", + "username": "TestUser", ? ++++ - "password": "TestPass", + "password": "TestPass", ? ++++ - "email": "[email protected]", + "email": "[email protected]", ? ++++ + } - } ? ^ + ), ? ^^ - ), ? - + ) - ) - task_config = TaskConfig() + task_config = TaskConfig() ? ++++ - task = PullRequests(project_config, task_config) + task = PullRequests(project_config, task_config) ? ++++ - repo = mock.Mock() + repo = mock.Mock() ? ++++ - repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] + repo.pull_requests.return_value = [mock.Mock(number=1, title="Test PR")] ? ++++ - task.get_repo = mock.Mock(return_value=repo) + task.get_repo = mock.Mock(return_value=repo) ? ++++ - task.logger = mock.Mock() + task.logger = mock.Mock() ? ++++ - task() + task() ? ++++ - task.logger.info.assert_called_with("#1: Test PR") + task.logger.info.assert_called_with("#1: Test PR") ? ++++
00bb631437fdf45c7a067da43aa042f8b1f6ef8e
osf_models/models/tag.py
osf_models/models/tag.py
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False)
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
Add unique together on _id and system
Add unique together on _id and system
Python
apache-2.0
Nesiehr/osf.io,adlius/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,crcresearch/osf.io,caneruguz/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,baylee-d/osf.io,leb2dg/osf.io,acshi/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,chrisseto/osf.io,chennan47/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,aaxelb/osf.io,chennan47/osf.io,caneruguz/osf.io,binoculars/osf.io,Nesiehr/osf.io,leb2dg/osf.io,caseyrollins/osf.io,adlius/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,TomBaxter/osf.io,erinspace/osf.io,sloria/osf.io,crcresearch/osf.io,binoculars/osf.io,felliott/osf.io,laurenrevere/osf.io,baylee-d/osf.io,leb2dg/osf.io,hmoco/osf.io,cwisecarver/osf.io,aaxelb/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,mfraezz/osf.io,mattclark/osf.io,binoculars/osf.io,mluo613/osf.io,icereval/osf.io,aaxelb/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,hmoco/osf.io,mfraezz/osf.io,cslzchen/osf.io,aaxelb/osf.io,alexschiller/osf.io,TomBaxter/osf.io,mluo613/osf.io,mluo613/osf.io,brianjgeiger/osf.io,icereval/osf.io,acshi/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,sloria/osf.io,pattisdr/osf.io,mfraezz/osf.io,pattisdr/osf.io,mluo613/osf.io,brianjgeiger/osf.io,acshi/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,cslzchen/osf.io,icereval/osf.io,adlius/osf.io,sloria/osf.io,TomBaxter/osf.io,hmoco/osf.io,acshi/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,mfraezz/osf.io,felliott/osf.io,hmoco/osf.io,caneruguz/osf.io,mluo613/osf.io,acshi/osf.io,Johnetordoff/osf.io,erinspace/osf.io,cwisecarver/osf.io,saradbowman/osf.io,saradbowman/osf.io,Nesiehr/osf.io,chrisseto/osf.io,felliott/osf.io,caseyrollins/osf.io,chennan47/osf.io,cslzchen/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) + class Meta: + unique_together = ('_id', 'system') +
Add unique together on _id and system
## Code Before: from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) ## Instruction: Add unique together on _id and system ## Code After: from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) class Meta: unique_together = ('_id', 'system')
from django.db import models from .base import BaseModel class Tag(BaseModel): _id = models.CharField(max_length=1024) lower = models.CharField(max_length=1024) system = models.BooleanField(default=False) + + class Meta: + unique_together = ('_id', 'system')
d66355e4758b37be39d17d681ede1dbbd6b9b311
setmagic/admin.py
setmagic/admin.py
from django import forms from django.contrib import admin from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
from django import forms from django.contrib import admin from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: if isinstance(custom_field, str): module, name = custom_field.rsplit('.', 1) custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
Use importlib to load custom fields by str
Use importlib to load custom fields by str
Python
mit
7ws/django-setmagic
from django import forms from django.contrib import admin + from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: + if isinstance(custom_field, str): + module, name = custom_field.rsplit('.', 1) + custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
Use importlib to load custom fields by str
## Code Before: from django import forms from django.contrib import admin from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin) ## Instruction: Use importlib to load custom fields by str ## Code After: from django import forms from django.contrib import admin from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: if isinstance(custom_field, str): module, name = custom_field.rsplit('.', 1) custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
from django import forms from django.contrib import admin + from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: + if isinstance(custom_field, str): + module, name = custom_field.rsplit('.', 1) + custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
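The change resolves a dotted path string into a field class by splitting off the attribute name and importing the module. A self-contained sketch of that rsplit-plus-import pattern, using the stdlib importlib (the code above goes through the since-deprecated django.utils.importlib wrapper) and a standard-library class so it runs anywhere:

from importlib import import_module

def load_by_path(dotted_path):
    # 'collections.OrderedDict' -> ('collections', 'OrderedDict')
    module_path, name = dotted_path.rsplit('.', 1)
    return getattr(import_module(module_path), name)

OrderedDict = load_by_path('collections.OrderedDict')
print(OrderedDict([('a', 1)]))  # OrderedDict([('a', 1)])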
71fef8b9696d79f7d6fd024320bc23ce1b7425f3
greatbigcrane/preferences/models.py
greatbigcrane/preferences/models.py
from django.db import models class Preference(models.Model): name = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=512)
from django.db import models class PreferenceManager(models.Manager): def get_preference(self, name, default=None): try: value = Preference.objects.get(name="projects_directory").value except Preference.DoesNotExist: return default class Preference(models.Model): name = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=512) objects = PreferenceManager()
Add a manager to make getting preferences prettier.
Add a manager to make getting preferences prettier.
Python
apache-2.0
pnomolos/greatbigcrane,pnomolos/greatbigcrane
from django.db import models + + class PreferenceManager(models.Manager): + def get_preference(self, name, default=None): + try: + value = Preference.objects.get(name="projects_directory").value + except Preference.DoesNotExist: + return default class Preference(models.Model): name = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=512) + objects = PreferenceManager()
Add a manager to make getting preferences prettier.
## Code Before: from django.db import models class Preference(models.Model): name = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=512) ## Instruction: Add a manager to make getting preferences prettier. ## Code After: from django.db import models class PreferenceManager(models.Manager): def get_preference(self, name, default=None): try: value = Preference.objects.get(name="projects_directory").value except Preference.DoesNotExist: return default class Preference(models.Model): name = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=512) objects = PreferenceManager()
from django.db import models + + class PreferenceManager(models.Manager): + def get_preference(self, name, default=None): + try: + value = Preference.objects.get(name="projects_directory").value + except Preference.DoesNotExist: + return default class Preference(models.Model): name = models.CharField(max_length=32, unique=True) value = models.CharField(max_length=512) + objects = PreferenceManager()
d487e8d74d8bbbadf003cd128f80868cc5651d21
shenfun/optimization/__init__.py
shenfun/optimization/__init__.py
import os import importlib from functools import wraps from . import cython try: from . import numba except ModuleNotFoundError: numba = None def optimizer(func): """Decorator used to wrap calls to optimized versions of functions.""" from shenfun.config import config mod = config['optimization']['mode'] verbose = config['optimization']['verbose'] if mod.lower() not in ('cython', 'numba'): # Use python function if verbose: print(func.__name__ + ' not optimized') return func mod = importlib.import_module('shenfun.optimization.'+mod.lower()) fun = getattr(mod, func.__name__, func) if verbose: if fun is func: print(fun.__name__ + ' not optimized') @wraps(func) def wrapped_function(*args, **kwargs): u0 = fun(*args, **kwargs) return u0 return wrapped_function
import os import importlib from functools import wraps from . import cython try: from . import numba except ModuleNotFoundError: numba = None def optimizer(func): """Decorator used to wrap calls to optimized versions of functions.""" from shenfun.config import config mod = 'cython' verbose = False try: mod = config['optimization']['mode'] verbose = config['optimization']['verbose'] except KeyError: pass if mod.lower() not in ('cython', 'numba'): # Use python function if verbose: print(func.__name__ + ' not optimized') return func mod = importlib.import_module('shenfun.optimization.'+mod.lower()) fun = getattr(mod, func.__name__, func) if verbose: if fun is func: print(fun.__name__ + ' not optimized') @wraps(func) def wrapped_function(*args, **kwargs): u0 = fun(*args, **kwargs) return u0 return wrapped_function
Use try clause for config in optimizer
Use try clause for config in optimizer
Python
bsd-2-clause
spectralDNS/shenfun,spectralDNS/shenfun,spectralDNS/shenfun
import os import importlib from functools import wraps from . import cython try: from . import numba except ModuleNotFoundError: numba = None def optimizer(func): """Decorator used to wrap calls to optimized versions of functions.""" from shenfun.config import config + mod = 'cython' + verbose = False + try: - mod = config['optimization']['mode'] + mod = config['optimization']['mode'] - verbose = config['optimization']['verbose'] + verbose = config['optimization']['verbose'] + except KeyError: + pass + if mod.lower() not in ('cython', 'numba'): # Use python function if verbose: print(func.__name__ + ' not optimized') return func mod = importlib.import_module('shenfun.optimization.'+mod.lower()) fun = getattr(mod, func.__name__, func) if verbose: if fun is func: print(fun.__name__ + ' not optimized') @wraps(func) def wrapped_function(*args, **kwargs): u0 = fun(*args, **kwargs) return u0 return wrapped_function
Use try clause for config in optimizer
## Code Before:
import os
import importlib
from functools import wraps

from . import cython
try:
    from . import numba
except ModuleNotFoundError:
    numba = None

def optimizer(func):
    """Decorator used to wrap calls to optimized versions of functions."""
    from shenfun.config import config
    mod = config['optimization']['mode']
    verbose = config['optimization']['verbose']
    if mod.lower() not in ('cython', 'numba'):
        # Use python function
        if verbose:
            print(func.__name__ + ' not optimized')
        return func

    mod = importlib.import_module('shenfun.optimization.'+mod.lower())
    fun = getattr(mod, func.__name__, func)
    if verbose:
        if fun is func:
            print(fun.__name__ + ' not optimized')

    @wraps(func)
    def wrapped_function(*args, **kwargs):
        u0 = fun(*args, **kwargs)
        return u0

    return wrapped_function

## Instruction:
Use try clause for config in optimizer

## Code After:
import os
import importlib
from functools import wraps

from . import cython
try:
    from . import numba
except ModuleNotFoundError:
    numba = None

def optimizer(func):
    """Decorator used to wrap calls to optimized versions of functions."""
    from shenfun.config import config
    mod = 'cython'
    verbose = False
    try:
        mod = config['optimization']['mode']
        verbose = config['optimization']['verbose']
    except KeyError:
        pass

    if mod.lower() not in ('cython', 'numba'):
        # Use python function
        if verbose:
            print(func.__name__ + ' not optimized')
        return func

    mod = importlib.import_module('shenfun.optimization.'+mod.lower())
    fun = getattr(mod, func.__name__, func)
    if verbose:
        if fun is func:
            print(fun.__name__ + ' not optimized')

    @wraps(func)
    def wrapped_function(*args, **kwargs):
        u0 = fun(*args, **kwargs)
        return u0

    return wrapped_function

import os
import importlib
from functools import wraps

from . import cython
try:
    from . import numba
except ModuleNotFoundError:
    numba = None

def optimizer(func):
    """Decorator used to wrap calls to optimized versions of functions."""
    from shenfun.config import config
+     mod = 'cython'
+     verbose = False
+     try:
-     mod = config['optimization']['mode']
+         mod = config['optimization']['mode']
?     ++++
-     verbose = config['optimization']['verbose']
+         verbose = config['optimization']['verbose']
?     ++++
+     except KeyError:
+         pass
+
    if mod.lower() not in ('cython', 'numba'):
        # Use python function
        if verbose:
            print(func.__name__ + ' not optimized')
        return func

    mod = importlib.import_module('shenfun.optimization.'+mod.lower())
    fun = getattr(mod, func.__name__, func)
    if verbose:
        if fun is func:
            print(fun.__name__ + ' not optimized')

    @wraps(func)
    def wrapped_function(*args, **kwargs):
        u0 = fun(*args, **kwargs)
        return u0

    return wrapped_function

b2e537c2d054854d0b36ccee7567c9ba9c2a5516
modulation_test.py
modulation_test.py
import pygame
import random
from demodulate.cfg import *
from gen_tone import *

if __name__ == "__main__":
    pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)
    pygame.mixer.init()
    WPM = random.uniform(2,20)
    pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A'
    #gen_test_data()
    data = gen_tone(pattern, WPM)
    snd = pygame.sndarray.make_sound(data)
    snd.play()

import pygame
import random
import time
from demodulate.cfg import *
from modulate import *
from gen_tone import *

if __name__ == "__main__":
    pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)
    pygame.mixer.init()
    WPM = random.uniform(2,20)
    pattern = chars_to_elements.letters_to_sequence("NA NA NA NA NA NA NA BATMAN")
    #gen_test_data()
    data = gen_tone(pattern, WPM)
    snd = pygame.sndarray.make_sound(data)
    chn = snd.play()
    while chn.get_busy():
        time.sleep(1)

Make modulation test wait for sound to stop playing before exiting
Make modulation test wait for sound to stop playing before exiting
Python
mit
nickodell/morse-code
import pygame
import random
+ import time
from demodulate.cfg import *
+ from modulate import *
from gen_tone import *

if __name__ == "__main__":
    pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)
    pygame.mixer.init()
    WPM = random.uniform(2,20)
-     pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A'
+     pattern = chars_to_elements.letters_to_sequence("NA NA NA NA NA NA NA BATMAN")
    #gen_test_data()
    data = gen_tone(pattern, WPM)
    snd = pygame.sndarray.make_sound(data)
-     snd.play()
+     chn = snd.play()
+     while chn.get_busy():
+         time.sleep(1)

Make modulation test wait for sound to stop playing before exiting
## Code Before:
import pygame
import random
from demodulate.cfg import *
from gen_tone import *

if __name__ == "__main__":
    pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)
    pygame.mixer.init()
    WPM = random.uniform(2,20)
    pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A'
    #gen_test_data()
    data = gen_tone(pattern, WPM)
    snd = pygame.sndarray.make_sound(data)
    snd.play()

## Instruction:
Make modulation test wait for sound to stop playing before exiting

## Code After:
import pygame
import random
import time
from demodulate.cfg import *
from modulate import *
from gen_tone import *

if __name__ == "__main__":
    pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)
    pygame.mixer.init()
    WPM = random.uniform(2,20)
    pattern = chars_to_elements.letters_to_sequence("NA NA NA NA NA NA NA BATMAN")
    #gen_test_data()
    data = gen_tone(pattern, WPM)
    snd = pygame.sndarray.make_sound(data)
    chn = snd.play()
    while chn.get_busy():
        time.sleep(1)

import pygame
import random
+ import time
from demodulate.cfg import *
+ from modulate import *
from gen_tone import *

if __name__ == "__main__":
    pygame.mixer.pre_init(frequency = int(SAMPLE_FREQ), channels = 1)
    pygame.mixer.init()
    WPM = random.uniform(2,20)
-     pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A'
+     pattern = chars_to_elements.letters_to_sequence("NA NA NA NA NA NA NA BATMAN")
    #gen_test_data()
    data = gen_tone(pattern, WPM)
    snd = pygame.sndarray.make_sound(data)
-     snd.play()
+     chn = snd.play()
?     ++++++
+     while chn.get_busy():
+         time.sleep(1)

621ca7bebfcc53026d8f98b9f6cfefe6ff25961b
src/util/constants.py
src/util/constants.py
SOS = '<S>'
# end of sentence token
EOS = '</S>'

SOS = chr(2)
# end of sentence token
EOS = chr(3)

Use separate characters for SOS and EOS
Use separate characters for SOS and EOS
Python
mit
milankinen/c2w2c,milankinen/c2w2c
- SOS = '<S>'
+ SOS = chr(2)
# end of sentence token
- EOS = '</S>'
+ EOS = chr(3)

Use separate characters for SOS and EOS
## Code Before:
SOS = '<S>'
# end of sentence token
EOS = '</S>'

## Instruction:
Use separate characters for SOS and EOS

## Code After:
SOS = chr(2)
# end of sentence token
EOS = chr(3)

- SOS = '<S>'
+ SOS = chr(2)
# end of sentence token
- EOS = '</S>'
+ EOS = chr(3)

86c106fc95946e4558fabfae57bbd039b248a70c
mindbender/maya/plugins/validate_single_shape.py
mindbender/maya/plugins/validate_single_shape.py
import pyblish.api


class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
    """One mesh per transform"""

    label = "Validate Single Shape"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    active = False
    optional = True
    families = [
        "mindbender.model",
        "mindbender.lookdev"
    ]

    def process(self, instance):
        from maya import cmds

        has_multiple_shapes = list()

        for node in instance:
            children = cmds.listRelatives(node, allDescendents=True) or list()
            shapes = cmds.listRelatives(node, shapes=True) or list()

            # Ensure there is only one child; there could be many,
            # including other transform nodes.
            has_single_shape = len(children) == 1

            # Ensure the one child is a shape
            has_single_child = len(shapes) == 1

            # Ensure the one child is of type "mesh"
            has_single_mesh = cmds.nodeType(shapes[0]) == "mesh"

            if not all([has_single_child,
                        has_single_shape,
                        has_single_mesh]):
                has_multiple_shapes.append(node)

        assert not has_multiple_shapes, (
            "\"%s\" has transforms with multiple shapes: %s" % (
                instance, ", ".join(
                    "\"" + member + "\"" for member in has_multiple_shapes))
        )

import pyblish.api


class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
    """Transforms with a mesh must ever only contain a single mesh

    This ensures models only contain a single shape node.

    """

    label = "Validate Single Shape"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = [
        "mindbender.model",
    ]

    def process(self, instance):
        from maya import cmds

        has_multiple_shapes = list()

        # Consider entire hierarchy of nodes included in an Instance
        hierarchy = cmds.listRelatives(instance, allDescendents=True)

        # Consider only nodes of type="mesh"
        meshes = cmds.ls(hierarchy, type="mesh", long=True)
        transforms = cmds.listRelatives(meshes, parent=True)

        for transform in set(transforms):
            shapes = cmds.listRelatives(transform, shapes=True) or list()

            # Ensure the one child is a shape
            has_single_shape = len(shapes) == 1
            self.log.info("has single shape: %s" % has_single_shape)

            # Ensure the one shape is of type "mesh"
            has_single_mesh = (
                has_single_shape and
                cmds.nodeType(shapes[0]) == "mesh"
            )
            self.log.info("has single mesh: %s" % has_single_mesh)

            if not all([has_single_shape, has_single_mesh]):
                has_multiple_shapes.append(transform)

        assert not has_multiple_shapes, (
            "\"%s\" has transforms with multiple shapes: %s" % (
                instance, ", ".join(
                    "\"" + member + "\"" for member in has_multiple_shapes))
        )

Repair validate single shape validator
Repair validate single shape validator
Python
mit
mindbender-studio/core,MoonShineVFX/core,getavalon/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core
import pyblish.api


class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
-     """One mesh per transform"""
+     """Transforms with a mesh must ever only contain a single mesh
+
+     This ensures models only contain a single shape node.
+
+     """

    label = "Validate Single Shape"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
-     active = False
-     optional = True
    families = [
        "mindbender.model",
-         "mindbender.lookdev"
    ]

    def process(self, instance):
        from maya import cmds

        has_multiple_shapes = list()

-         for node in instance:
+         # Consider entire hierarchy of nodes included in an Instance
-             children = cmds.listRelatives(node, allDescendents=True) or list()
+         hierarchy = cmds.listRelatives(instance, allDescendents=True)
-             shapes = cmds.listRelatives(node, shapes=True) or list()

-             # Ensure there is only one child; there could be many,
-             # including other transform nodes.
-             has_single_shape = len(children) == 1
+         # Consider only nodes of type="mesh"
+         meshes = cmds.ls(hierarchy, type="mesh", long=True)
+         transforms = cmds.listRelatives(meshes, parent=True)
+
+         for transform in set(transforms):
+             shapes = cmds.listRelatives(transform, shapes=True) or list()

            # Ensure the one child is a shape
-             has_single_child = len(shapes) == 1
+             has_single_shape = len(shapes) == 1
+             self.log.info("has single shape: %s" % has_single_shape)

-             # Ensure the one child is of type "mesh"
+             # Ensure the one shape is of type "mesh"
+             has_single_mesh = (
+                 has_single_shape and
-             has_single_mesh = cmds.nodeType(shapes[0]) == "mesh"
+                 cmds.nodeType(shapes[0]) == "mesh"
+             )
+             self.log.info("has single mesh: %s" % has_single_mesh)

-             if not all([has_single_child,
+             if not all([has_single_shape, has_single_mesh]):
-                         has_single_shape,
-                         has_single_mesh]):
-                 has_multiple_shapes.append(node)
+                 has_multiple_shapes.append(transform)

        assert not has_multiple_shapes, (
            "\"%s\" has transforms with multiple shapes: %s" % (
                instance, ", ".join(
                    "\"" + member + "\"" for member in has_multiple_shapes))
        )

Repair validate single shape validator
## Code Before:
import pyblish.api


class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
    """One mesh per transform"""

    label = "Validate Single Shape"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    active = False
    optional = True
    families = [
        "mindbender.model",
        "mindbender.lookdev"
    ]

    def process(self, instance):
        from maya import cmds

        has_multiple_shapes = list()

        for node in instance:
            children = cmds.listRelatives(node, allDescendents=True) or list()
            shapes = cmds.listRelatives(node, shapes=True) or list()

            # Ensure there is only one child; there could be many,
            # including other transform nodes.
            has_single_shape = len(children) == 1

            # Ensure the one child is a shape
            has_single_child = len(shapes) == 1

            # Ensure the one child is of type "mesh"
            has_single_mesh = cmds.nodeType(shapes[0]) == "mesh"

            if not all([has_single_child,
                        has_single_shape,
                        has_single_mesh]):
                has_multiple_shapes.append(node)

        assert not has_multiple_shapes, (
            "\"%s\" has transforms with multiple shapes: %s" % (
                instance, ", ".join(
                    "\"" + member + "\"" for member in has_multiple_shapes))
        )

## Instruction:
Repair validate single shape validator

## Code After:
import pyblish.api


class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
    """Transforms with a mesh must ever only contain a single mesh

    This ensures models only contain a single shape node.

    """

    label = "Validate Single Shape"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = [
        "mindbender.model",
    ]

    def process(self, instance):
        from maya import cmds

        has_multiple_shapes = list()

        # Consider entire hierarchy of nodes included in an Instance
        hierarchy = cmds.listRelatives(instance, allDescendents=True)

        # Consider only nodes of type="mesh"
        meshes = cmds.ls(hierarchy, type="mesh", long=True)
        transforms = cmds.listRelatives(meshes, parent=True)

        for transform in set(transforms):
            shapes = cmds.listRelatives(transform, shapes=True) or list()

            # Ensure the one child is a shape
            has_single_shape = len(shapes) == 1
            self.log.info("has single shape: %s" % has_single_shape)

            # Ensure the one shape is of type "mesh"
            has_single_mesh = (
                has_single_shape and
                cmds.nodeType(shapes[0]) == "mesh"
            )
            self.log.info("has single mesh: %s" % has_single_mesh)

            if not all([has_single_shape, has_single_mesh]):
                has_multiple_shapes.append(transform)

        assert not has_multiple_shapes, (
            "\"%s\" has transforms with multiple shapes: %s" % (
                instance, ", ".join(
                    "\"" + member + "\"" for member in has_multiple_shapes))
        )

import pyblish.api


class ValidateMindbenderSingleShape(pyblish.api.InstancePlugin):
-     """One mesh per transform"""
+     """Transforms with a mesh must ever only contain a single mesh
+
+     This ensures models only contain a single shape node.
+
+     """

    label = "Validate Single Shape"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
-     active = False
-     optional = True
    families = [
        "mindbender.model",
-         "mindbender.lookdev"
    ]

    def process(self, instance):
        from maya import cmds

        has_multiple_shapes = list()

-         for node in instance:
+         # Consider entire hierarchy of nodes included in an Instance
-             children = cmds.listRelatives(node, allDescendents=True) or list()
?              ^^^^                         ^^^^ ^^^^^^                ^^ ----------
+         hierarchy = cmds.listRelatives(instance, allDescendents=True)
?          ^^^^^^ ^                      + ^^^^^
-             shapes = cmds.listRelatives(node, shapes=True) or list()

-             # Ensure there is only one child; there could be many,
-             # including other transform nodes.
-             has_single_shape = len(children) == 1
+         # Consider only nodes of type="mesh"
+         meshes = cmds.ls(hierarchy, type="mesh", long=True)
+         transforms = cmds.listRelatives(meshes, parent=True)
+
+         for transform in set(transforms):
+             shapes = cmds.listRelatives(transform, shapes=True) or list()

            # Ensure the one child is a shape
-             has_single_child = len(shapes) == 1
?                          ^ ^^
+             has_single_shape = len(shapes) == 1
?                          ^ ^^
+             self.log.info("has single shape: %s" % has_single_shape)

-             # Ensure the one child is of type "mesh"
?                              ^ ^^
+             # Ensure the one shape is of type "mesh"
?                              ^ ^^
+             has_single_mesh = (
+                 has_single_shape and
-             has_single_mesh = cmds.nodeType(shapes[0]) == "mesh"
?             ---------------  ^
+                 cmds.nodeType(shapes[0]) == "mesh"
?                ^^
+             )
+             self.log.info("has single mesh: %s" % has_single_mesh)

-             if not all([has_single_child,
?                                      ^ ^^
+             if not all([has_single_shape, has_single_mesh]):
?                                      ^ ++++++++++ ++ ^^^^^^^^^
-                         has_single_shape,
-                         has_single_mesh]):
-                 has_multiple_shapes.append(node)
?                                            ^^
+                 has_multiple_shapes.append(transform)
?                                            +++ ++ ^^

        assert not has_multiple_shapes, (
            "\"%s\" has transforms with multiple shapes: %s" % (
                instance, ", ".join(
                    "\"" + member + "\"" for member in has_multiple_shapes))
        )

c7660db45e0275a685a6cc450fd4341a69c52b92
threaded_multihost/fields.py
threaded_multihost/fields.py
from django.db.models import ForeignKey
from django.contrib.auth.models import User

import threadlocals

class UserField(ForeignKey):
    """ UserField

    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
        ForeignKey.__init__(self, User, **kwargs)

class CreatorField(UserField):
    """ CreatorField

    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)

class EditorField(CreatorField):
    """ EditorField

    By default, sets editable=False, default=threadlocals.get_current_user

    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        super(CreatorField, self).__init__(**kwargs)

    def pre_save(self, model_instance, add):
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value

from django.db.models import ForeignKey
from django.contrib.auth.models import User

import threadlocals

class UserField(ForeignKey):
    """ UserField

    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('to', User)
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
        ForeignKey.__init__(self, **kwargs)

class CreatorField(UserField):
    """ CreatorField

    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)

class EditorField(CreatorField):
    """ EditorField

    By default, sets editable=False, default=threadlocals.get_current_user

    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        super(CreatorField, self).__init__(**kwargs)

    def pre_save(self, model_instance, add):
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value

try:
    from south.modelsinspector import add_introspection_rules
except ImportError:
    add_introspection_rules = False

if add_introspection_rules:
    add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])

Patch from chrischambers to enable south migrations.
Patch from chrischambers to enable south migrations.
Python
bsd-3-clause
diver-in-sky/django-threaded-multihost
from django.db.models import ForeignKey
from django.contrib.auth.models import User

import threadlocals

class UserField(ForeignKey):
    """ UserField

    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
+         kwargs.setdefault('to', User)
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
-         ForeignKey.__init__(self, User, **kwargs)
+         ForeignKey.__init__(self, **kwargs)
-
+

class CreatorField(UserField):
    """ CreatorField

    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)

class EditorField(CreatorField):
    """ EditorField

    By default, sets editable=False, default=threadlocals.get_current_user

    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        super(CreatorField, self).__init__(**kwargs)

    def pre_save(self, model_instance, add):
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value
+
+ try:
+     from south.modelsinspector import add_introspection_rules
+ except ImportError:
+     add_introspection_rules = False
+
+ if add_introspection_rules:
+     add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])

Patch from chrischambers to enable south migrations.
## Code Before:
from django.db.models import ForeignKey
from django.contrib.auth.models import User

import threadlocals

class UserField(ForeignKey):
    """ UserField

    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
        ForeignKey.__init__(self, User, **kwargs)

class CreatorField(UserField):
    """ CreatorField

    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)

class EditorField(CreatorField):
    """ EditorField

    By default, sets editable=False, default=threadlocals.get_current_user

    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        super(CreatorField, self).__init__(**kwargs)

    def pre_save(self, model_instance, add):
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value

## Instruction:
Patch from chrischambers to enable south migrations.

## Code After:
from django.db.models import ForeignKey
from django.contrib.auth.models import User

import threadlocals

class UserField(ForeignKey):
    """ UserField

    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('to', User)
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
        ForeignKey.__init__(self, **kwargs)

class CreatorField(UserField):
    """ CreatorField

    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)

class EditorField(CreatorField):
    """ EditorField

    By default, sets editable=False, default=threadlocals.get_current_user

    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        super(CreatorField, self).__init__(**kwargs)

    def pre_save(self, model_instance, add):
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value

try:
    from south.modelsinspector import add_introspection_rules
except ImportError:
    add_introspection_rules = False

if add_introspection_rules:
    add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])

from django.db.models import ForeignKey
from django.contrib.auth.models import User

import threadlocals

class UserField(ForeignKey):
    """ UserField

    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
+         kwargs.setdefault('to', User)
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
-         ForeignKey.__init__(self, User, **kwargs)
?                                   ------
+         ForeignKey.__init__(self, **kwargs)
-
+

class CreatorField(UserField):
    """ CreatorField

    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)

class EditorField(CreatorField):
    """ EditorField

    By default, sets editable=False, default=threadlocals.get_current_user

    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        super(CreatorField, self).__init__(**kwargs)

    def pre_save(self, model_instance, add):
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value
+
+ try:
+     from south.modelsinspector import add_introspection_rules
+ except ImportError:
+     add_introspection_rules = False
+
+ if add_introspection_rules:
+     add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])

369f607df1efa7cd715a1d5ed4d86b972f44d23b
project/home/views.py
project/home/views.py
from flask import render_template, Blueprint
from project import db # pragma: no cover
from project.models import Person # pragma: no cover

# config
home_blueprint = Blueprint(
    'home', __name__,
    template_folder='templates'
) # pragma: no cover

# routes
# use decorators to link the function to a url
@home_blueprint.route('/', methods=['GET', 'POST']) # pragma: no cover
def home():
    error = None
    persons = db.session.query(Person).all()
    return render_template(
        'index.html', persons=persons, error=error)

from flask import render_template, Blueprint
from project import db # pragma: no cover
from project.models import Person # pragma: no cover

import random

# config
home_blueprint = Blueprint(
    'home', __name__,
    template_folder='templates'
) # pragma: no cover

MAX_GRID_SIZE_HOMEPAGE_PEOPLE = 6

# routes
# use decorators to link the function to a url
@home_blueprint.route('/', methods=['GET', 'POST']) # pragma: no cover
def home():
    error = None
    current_person_count = db.session.query(Person).count()
    if current_person_count <= MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
        persons = db.session.query(Person).all()
    else:
        persons = []
        while len(persons) < MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
            rand = random.randrange(0, current_person_count)
            random_person = db.session.query(Person)[rand]
            if random_person not in persons:
                persons.append(random_person)
    return render_template(
        'index.html', persons=persons, error=error)

Select MAX people randomly, else all.
Select MAX people randomly, else all.
Python
isc
dhmncivichacks/timewebsite,mikeputnam/timewebsite,mikeputnam/timewebsite,dhmncivichacks/timewebsite,dhmncivichacks/timewebsite,mikeputnam/timewebsite
from flask import render_template, Blueprint
from project import db # pragma: no cover
from project.models import Person # pragma: no cover
+
+ import random

# config
home_blueprint = Blueprint(
    'home', __name__,
    template_folder='templates'
) # pragma: no cover

+ MAX_GRID_SIZE_HOMEPAGE_PEOPLE = 6
+
# routes
# use decorators to link the function to a url
@home_blueprint.route('/', methods=['GET', 'POST']) # pragma: no cover
def home():
    error = None
+     current_person_count = db.session.query(Person).count()
+     if current_person_count <= MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
-     persons = db.session.query(Person).all()
+         persons = db.session.query(Person).all()
+     else:
+         persons = []
+         while len(persons) < MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
+             rand = random.randrange(0, current_person_count)
+             random_person = db.session.query(Person)[rand]
+             if random_person not in persons:
+                 persons.append(random_person)
    return render_template(
        'index.html', persons=persons, error=error)

Select MAX people randomly, else all.
## Code Before:
from flask import render_template, Blueprint
from project import db # pragma: no cover
from project.models import Person # pragma: no cover

# config
home_blueprint = Blueprint(
    'home', __name__,
    template_folder='templates'
) # pragma: no cover

# routes
# use decorators to link the function to a url
@home_blueprint.route('/', methods=['GET', 'POST']) # pragma: no cover
def home():
    error = None
    persons = db.session.query(Person).all()
    return render_template(
        'index.html', persons=persons, error=error)

## Instruction:
Select MAX people randomly, else all.

## Code After:
from flask import render_template, Blueprint
from project import db # pragma: no cover
from project.models import Person # pragma: no cover

import random

# config
home_blueprint = Blueprint(
    'home', __name__,
    template_folder='templates'
) # pragma: no cover

MAX_GRID_SIZE_HOMEPAGE_PEOPLE = 6

# routes
# use decorators to link the function to a url
@home_blueprint.route('/', methods=['GET', 'POST']) # pragma: no cover
def home():
    error = None
    current_person_count = db.session.query(Person).count()
    if current_person_count <= MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
        persons = db.session.query(Person).all()
    else:
        persons = []
        while len(persons) < MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
            rand = random.randrange(0, current_person_count)
            random_person = db.session.query(Person)[rand]
            if random_person not in persons:
                persons.append(random_person)
    return render_template(
        'index.html', persons=persons, error=error)

from flask import render_template, Blueprint
from project import db # pragma: no cover
from project.models import Person # pragma: no cover
+
+ import random

# config
home_blueprint = Blueprint(
    'home', __name__,
    template_folder='templates'
) # pragma: no cover

+ MAX_GRID_SIZE_HOMEPAGE_PEOPLE = 6
+
# routes
# use decorators to link the function to a url
@home_blueprint.route('/', methods=['GET', 'POST']) # pragma: no cover
def home():
    error = None
+     current_person_count = db.session.query(Person).count()
+     if current_person_count <= MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
-     persons = db.session.query(Person).all()
+         persons = db.session.query(Person).all()
?     ++++
+     else:
+         persons = []
+         while len(persons) < MAX_GRID_SIZE_HOMEPAGE_PEOPLE:
+             rand = random.randrange(0, current_person_count)
+             random_person = db.session.query(Person)[rand]
+             if random_person not in persons:
+                 persons.append(random_person)
    return render_template(
        'index.html', persons=persons, error=error)

3a8a7661c0aad111dbaace178062352b30f7fac5
numcodecs/tests/__init__.py
numcodecs/tests/__init__.py
from __future__ import absolute_import, print_function, division
from __future__ import absolute_import, print_function, division

import pytest

pytest.register_assert_rewrite('numcodecs.tests.common')

Enable pytest rewriting in test helper functions.
Enable pytest rewriting in test helper functions.
Python
mit
alimanfoo/numcodecs,zarr-developers/numcodecs,alimanfoo/numcodecs
from __future__ import absolute_import, print_function, division
+
+ import pytest
+
+ pytest.register_assert_rewrite('numcodecs.tests.common')

Enable pytest rewriting in test helper functions.
## Code Before:
from __future__ import absolute_import, print_function, division

## Instruction:
Enable pytest rewriting in test helper functions.

## Code After:
from __future__ import absolute_import, print_function, division

import pytest

pytest.register_assert_rewrite('numcodecs.tests.common')

from __future__ import absolute_import, print_function, division
+
+ import pytest
+
+ pytest.register_assert_rewrite('numcodecs.tests.common')

ed491860864c363be36d99c09ff0131a5fe00aaf
test/Driver/Dependencies/Inputs/touch.py
test/Driver/Dependencies/Inputs/touch.py
import os
import sys

assert len(sys.argv) >= 2
timeVal = int(sys.argv[1])

# offset between Unix and LLVM epochs
timeVal += 946684800

# Update the output file mtime, or create it if necessary.
# From http://stackoverflow.com/a/1160227.
for outputFile in sys.argv[1:]:
    with open(outputFile, 'a'):
        os.utime(outputFile, (timeVal, timeVal))

import os
import sys

assert len(sys.argv) >= 2
timeVal = int(sys.argv[1])

# Update the output file mtime, or create it if necessary.
# From http://stackoverflow.com/a/1160227.
for outputFile in sys.argv[1:]:
    with open(outputFile, 'a'):
        os.utime(outputFile, (timeVal, timeVal))

Fix tests for file timestamps to drop the LLVM epoch offset.
Fix tests for file timestamps to drop the LLVM epoch offset.

Now that Swift is not using LLVM's TimeValue (564fc6f2 and previous commit) there is no offset from the system_clock epoch. The offset could be added into the tests that use touch.py (so the times would not be back in 1984) but I decided not to do that to avoid merge conflicts in the test files.

Python
apache-2.0
aschwaighofer/swift,tinysun212/swift-windows,arvedviehweger/swift,xwu/swift,airspeedswift/swift,parkera/swift,tinysun212/swift-windows,JGiola/swift,JaSpa/swift,JGiola/swift,parkera/swift,zisko/swift,hughbe/swift,codestergit/swift,CodaFi/swift,rudkx/swift,huonw/swift,tkremenek/swift,jtbandes/swift,codestergit/swift,practicalswift/swift,frootloops/swift,tardieu/swift,jckarter/swift,return/swift,benlangmuir/swift,xwu/swift,danielmartin/swift,djwbrown/swift,gribozavr/swift,return/swift,atrick/swift,arvedviehweger/swift,danielmartin/swift,nathawes/swift,tjw/swift,OscarSwanros/swift,shahmishal/swift,benlangmuir/swift,benlangmuir/swift,JGiola/swift,amraboelela/swift,roambotics/swift,tardieu/swift,jmgc/swift,parkera/swift,tardieu/swift,tinysun212/swift-windows,jmgc/swift,parkera/swift,danielmartin/swift,gribozavr/swift,sschiau/swift,natecook1000/swift,huonw/swift,karwa/swift,gregomni/swift,harlanhaskins/swift,aschwaighofer/swift,practicalswift/swift,felix91gr/swift,return/swift,tkremenek/swift,shajrawi/swift,allevato/swift,jckarter/swift,alblue/swift,natecook1000/swift,amraboelela/swift,xedin/swift,gottesmm/swift,hughbe/swift,harlanhaskins/swift,gottesmm/swift,hughbe/swift,tkremenek/swift,CodaFi/swift,hooman/swift,alblue/swift,karwa/swift,brentdax/swift,sschiau/swift,bitjammer/swift,uasys/swift,sschiau/swift,allevato/swift,return/swift,austinzheng/swift,gregomni/swift,arvedviehweger/swift,uasys/swift,zisko/swift,stephentyrone/swift,swiftix/swift,uasys/swift,shajrawi/swift,CodaFi/swift,lorentey/swift,hughbe/swift,bitjammer/swift,austinzheng/swift,tkremenek/swift,devincoughlin/swift,stephentyrone/swift,aschwaighofer/swift,frootloops/swift,xwu/swift,manavgabhawala/swift,manavgabhawala/swift,manavgabhawala/swift,Jnosh/swift,bitjammer/swift,bitjammer/swift,OscarSwanros/swift,karwa/swift,shahmishal/swift,hughbe/swift,milseman/swift,atrick/swift,hooman/swift,austinzheng/swift,glessard/swift,JGiola/swift,danielmartin/swift,parkera/swift,parkera/swift,return/swift,zisko/swift,glessard/swift,milseman/swift,alblue/swift,tkremenek/swift,tardieu/swift,rudkx/swift,jmgc/swift,shajrawi/swift,frootloops/swift,calebd/swift,tjw/swift,karwa/swift,ahoppen/swift,allevato/swift,tjw/swift,frootloops/swift,sschiau/swift,CodaFi/swift,felix91gr/swift,shahmishal/swift,aschwaighofer/swift,arvedviehweger/swift,codestergit/swift,JaSpa/swift,stephentyrone/swift,jopamer/swift,OscarSwanros/swift,apple/swift,apple/swift,jckarter/swift,djwbrown/swift,practicalswift/swift,deyton/swift,jopamer/swift,parkera/swift,harlanhaskins/swift,jmgc/swift,roambotics/swift,djwbrown/swift,gribozavr/swift,benlangmuir/swift,manavgabhawala/swift,parkera/swift,huonw/swift,amraboelela/swift,jtbandes/swift,tinysun212/swift-windows,shajrawi/swift,felix91gr/swift,roambotics/swift,ahoppen/swift,airspeedswift/swift,jtbandes/swift,djwbrown/swift,ahoppen/swift,tinysun212/swift-windows,devincoughlin/swift,tinysun212/swift-windows,JaSpa/swift,swiftix/swift,benlangmuir/swift,jtbandes/swift,amraboelela/swift,codestergit/swift,uasys/swift,milseman/swift,devincoughlin/swift,felix91gr/swift,lorentey/swift,OscarSwanros/swift,codestergit/swift,calebd/swift,Jnosh/swift,tjw/swift,rudkx/swift,manavgabhawala/swift,JaSpa/swift,zisko/swift,xedin/swift,jopamer/swift,allevato/swift,rudkx/swift,roambotics/swift,alblue/swift,rudkx/swift,lorentey/swift,shajrawi/swift,practicalswift/swift,jckarter/swift,stephentyrone/swift,natecook1000/swift,gribozavr/swift,calebd/swift,tjw/swift,practicalswift/swift,arvedviehweger/swift,gottesmm/swift,nathawes/swift,shahmishal/swift,Jnosh/swift,Jnosh/swift,hughbe/swift,nathawes/swift,practicalswift/swift,jmgc/swift,danielmartin/swift,jckarter/swift,devincoughlin/swift,shahmishal/swift,bitjammer/swift,shahmishal/swift,jtbandes/swift,jopamer/swift,airspeedswift/swift,gribozavr/swift,nathawes/swift,swiftix/swift,ahoppen/swift,djwbrown/swift,manavgabhawala/swift,hooman/swift,Jnosh/swift,codestergit/swift,gottesmm/swift,aschwaighofer/swift,sschiau/swift,calebd/swift,natecook1000/swift,roambotics/swift,allevato/swift,zisko/swift,karwa/swift,glessard/swift,airspeedswift/swift,gottesmm/swift,huonw/swift,djwbrown/swift,harlanhaskins/swift,hughbe/swift,practicalswift/swift,brentdax/swift,jopamer/swift,gottesmm/swift,return/swift,swiftix/swift,alblue/swift,frootloops/swift,amraboelela/swift,airspeedswift/swift,calebd/swift,karwa/swift,deyton/swift,allevato/swift,nathawes/swift,brentdax/swift,milseman/swift,tkremenek/swift,deyton/swift,gribozavr/swift,djwbrown/swift,OscarSwanros/swift,shajrawi/swift,lorentey/swift,jmgc/swift,xedin/swift,amraboelela/swift,jmgc/swift,frootloops/swift,CodaFi/swift,jckarter/swift,roambotics/swift,calebd/swift,alblue/swift,karwa/swift,deyton/swift,ahoppen/swift,deyton/swift
import os
import sys

assert len(sys.argv) >= 2
timeVal = int(sys.argv[1])

- # offset between Unix and LLVM epochs
- timeVal += 946684800
-
# Update the output file mtime, or create it if necessary.
# From http://stackoverflow.com/a/1160227.
for outputFile in sys.argv[1:]:
    with open(outputFile, 'a'):
        os.utime(outputFile, (timeVal, timeVal))

Fix tests for file timestamps to drop the LLVM epoch offset.
## Code Before:
import os
import sys

assert len(sys.argv) >= 2
timeVal = int(sys.argv[1])

# offset between Unix and LLVM epochs
timeVal += 946684800

# Update the output file mtime, or create it if necessary.
# From http://stackoverflow.com/a/1160227.
for outputFile in sys.argv[1:]:
    with open(outputFile, 'a'):
        os.utime(outputFile, (timeVal, timeVal))

## Instruction:
Fix tests for file timestamps to drop the LLVM epoch offset.

## Code After:
import os
import sys

assert len(sys.argv) >= 2
timeVal = int(sys.argv[1])

# Update the output file mtime, or create it if necessary.
# From http://stackoverflow.com/a/1160227.
for outputFile in sys.argv[1:]:
    with open(outputFile, 'a'):
        os.utime(outputFile, (timeVal, timeVal))

import os
import sys

assert len(sys.argv) >= 2
timeVal = int(sys.argv[1])

- # offset between Unix and LLVM epochs
- timeVal += 946684800
-
# Update the output file mtime, or create it if necessary.
# From http://stackoverflow.com/a/1160227.
for outputFile in sys.argv[1:]:
    with open(outputFile, 'a'):
        os.utime(outputFile, (timeVal, timeVal))

37f08dab37601b7621743467d6b78fb0306b5054
lcapy/config.py
lcapy/config.py
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q')

# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside', 'j': 'I'}

# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}

import sympy as sym
print_expr_map = {sym.I: 'j'}

# Hack to print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'

exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q', 'beta', 'gamma', 'zeta')

# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside', 'j': 'I'}

# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}

import sympy as sym
print_expr_map = {sym.I: 'j'}

# Hack to pretty print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'

Exclude beta, gamma, zeta functions
Exclude beta, gamma, zeta functions
Python
lgpl-2.1
mph-/lcapy
- exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q')
+ exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q', 'beta', 'gamma', 'zeta')

# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside', 'j': 'I'}

# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}

import sympy as sym
print_expr_map = {sym.I: 'j'}

- # Hack to print i as j
+ # Hack to pretty print i as j

from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'

Exclude beta, gamma, zeta functions
## Code Before:
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q')

# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside', 'j': 'I'}

# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}

import sympy as sym
print_expr_map = {sym.I: 'j'}

# Hack to print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'

## Instruction:
Exclude beta, gamma, zeta functions

## Code After:
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q', 'beta', 'gamma', 'zeta')

# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside', 'j': 'I'}

# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}

import sympy as sym
print_expr_map = {sym.I: 'j'}

# Hack to pretty print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'

- exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q')
+ exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q', 'beta', 'gamma', 'zeta')
?                                              +++++++++++++++++++++++++

# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside', 'j': 'I'}

# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}

import sympy as sym
print_expr_map = {sym.I: 'j'}

- # Hack to print i as j
+ # Hack to pretty print i as j
?            +++++++

from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'

3484610b0c711b000da8dc37ab76649aa4abfc58
ptt_preproc_filter.py
ptt_preproc_filter.py
import json
import sys
from os import scandir, remove
from datetime import datetime

START_DT = datetime(2016, 7, 1, 0, 0, 0)
END_DT = datetime(2016, 12, 1, 0, 0, 0)
DRY_RUN = True

for dir_entry in scandir('preprocessed'):

    path = dir_entry.path
    with open(path) as f:

        # read the json into d
        try:
            d = json.load(f)
        except:
            print('[Error]', path, sep='\t', file=sys.stderr)
            continue

        # decide keep or remove
        authores_dt = datetime.fromtimestamp(d['authored_ts'])

        # print [DATETIME] path KEEP|REMOVE DRY_RUN?
        print(authores_dt, path, sep='\t', end='\t')
        if START_DT <= authores_dt < END_DT:
            print('KEEP')
        else:
            if DRY_RUN:
                print('REMOVE', 'DRY_RUN', sep='\t')
            else:
                print('REMOVE', sep='\t')
                remove(path)

import json
import sys
from pathlib import Path
from os import remove
from datetime import datetime

START_DT = datetime(2016, 7, 1, 0, 0, 0)
END_DT = datetime(2016, 12, 1, 0, 0, 0)
DRY_RUN = True

for path in Path('preprocessed/').iterdir():

    with path.open() as f:

        # read the json into d
        try:
            d = json.load(f)
        except:
            print('[Error]', path, sep='\t', file=sys.stderr)
            continue

        # decide keep or remove
        authores_dt = datetime.fromtimestamp(d['authored_ts'])

        # print [DATETIME] path KEEP|REMOVE DRY_RUN?
        print(authores_dt, path, sep='\t', end='\t')
        if START_DT <= authores_dt < END_DT:
            print('KEEP')
        else:
            if DRY_RUN:
                print('REMOVE', 'DRY_RUN', sep='\t')
            else:
                print('REMOVE', sep='\t')
                remove(str(path))

Use pathlib in the filter
Use pathlib in the filter
Python
mit
moskytw/mining-news
import json
import sys
+ from pathlib import Path
- from os import scandir, remove
+ from os import remove
from datetime import datetime

START_DT = datetime(2016, 7, 1, 0, 0, 0)
END_DT = datetime(2016, 12, 1, 0, 0, 0)
DRY_RUN = True

- for dir_entry in scandir('preprocessed'):
+ for path in Path('preprocessed/').iterdir():

-     path = dir_entry.path
-     with open(path) as f:
+     with path.open() as f:

        # read the json into d
        try:
            d = json.load(f)
        except:
            print('[Error]', path, sep='\t', file=sys.stderr)
            continue

        # decide keep or remove
        authores_dt = datetime.fromtimestamp(d['authored_ts'])

        # print [DATETIME] path KEEP|REMOVE DRY_RUN?
        print(authores_dt, path, sep='\t', end='\t')
        if START_DT <= authores_dt < END_DT:
            print('KEEP')
        else:
            if DRY_RUN:
                print('REMOVE', 'DRY_RUN', sep='\t')
            else:
                print('REMOVE', sep='\t')
-                 remove(path)
+                 remove(str(path))

Use pathlib in the filter
## Code Before:
import json
import sys
from os import scandir, remove
from datetime import datetime

START_DT = datetime(2016, 7, 1, 0, 0, 0)
END_DT = datetime(2016, 12, 1, 0, 0, 0)
DRY_RUN = True

for dir_entry in scandir('preprocessed'):

    path = dir_entry.path
    with open(path) as f:

        # read the json into d
        try:
            d = json.load(f)
        except:
            print('[Error]', path, sep='\t', file=sys.stderr)
            continue

        # decide keep or remove
        authores_dt = datetime.fromtimestamp(d['authored_ts'])

        # print [DATETIME] path KEEP|REMOVE DRY_RUN?
        print(authores_dt, path, sep='\t', end='\t')
        if START_DT <= authores_dt < END_DT:
            print('KEEP')
        else:
            if DRY_RUN:
                print('REMOVE', 'DRY_RUN', sep='\t')
            else:
                print('REMOVE', sep='\t')
                remove(path)

## Instruction:
Use pathlib in the filter

## Code After:
import json
import sys
from pathlib import Path
from os import remove
from datetime import datetime

START_DT = datetime(2016, 7, 1, 0, 0, 0)
END_DT = datetime(2016, 12, 1, 0, 0, 0)
DRY_RUN = True

for path in Path('preprocessed/').iterdir():

    with path.open() as f:

        # read the json into d
        try:
            d = json.load(f)
        except:
            print('[Error]', path, sep='\t', file=sys.stderr)
            continue

        # decide keep or remove
        authores_dt = datetime.fromtimestamp(d['authored_ts'])

        # print [DATETIME] path KEEP|REMOVE DRY_RUN?
        print(authores_dt, path, sep='\t', end='\t')
        if START_DT <= authores_dt < END_DT:
            print('KEEP')
        else:
            if DRY_RUN:
                print('REMOVE', 'DRY_RUN', sep='\t')
            else:
                print('REMOVE', sep='\t')
                remove(str(path))

import json
import sys
+ from pathlib import Path
- from os import scandir, remove
?                ---------
+ from os import remove
from datetime import datetime

START_DT = datetime(2016, 7, 1, 0, 0, 0)
END_DT = datetime(2016, 12, 1, 0, 0, 0)
DRY_RUN = True

- for dir_entry in scandir('preprocessed'):
+ for path in Path('preprocessed/').iterdir():

-     path = dir_entry.path
-     with open(path) as f:
?          ----
+     with path.open() as f:
?          +++++

        # read the json into d
        try:
            d = json.load(f)
        except:
            print('[Error]', path, sep='\t', file=sys.stderr)
            continue

        # decide keep or remove
        authores_dt = datetime.fromtimestamp(d['authored_ts'])

        # print [DATETIME] path KEEP|REMOVE DRY_RUN?
        print(authores_dt, path, sep='\t', end='\t')
        if START_DT <= authores_dt < END_DT:
            print('KEEP')
        else:
            if DRY_RUN:
                print('REMOVE', 'DRY_RUN', sep='\t')
            else:
                print('REMOVE', sep='\t')
-                 remove(path)
+                 remove(str(path))
?                        ++++    +

0471c689bbe4e5b1116c25a6ccea58588c09d4d7
jasmin_notifications/urls.py
jasmin_notifications/urls.py
__author__ = "Matt Pryor" __copyright__ = "Copyright 2015 UK Science and Technology Facilities Council" from django.conf.urls import url, include from . import views app_name = 'jasmin_notifications' urlpatterns = [ url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'), ]
__author__ = "Matt Pryor" __copyright__ = "Copyright 2015 UK Science and Technology Facilities Council" from django.conf.urls import url, include from . import views app_name = 'jasmin_notifications' urlpatterns = [ url( r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$', views.follow, name = 'follow' ), ]
Update regex to match only UUIDs
Update regex to match only UUIDs
Python
mit
cedadev/jasmin-notifications,cedadev/jasmin-notifications
__author__ = "Matt Pryor" __copyright__ = "Copyright 2015 UK Science and Technology Facilities Council" from django.conf.urls import url, include from . import views app_name = 'jasmin_notifications' urlpatterns = [ - url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'), + url( + r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$', + views.follow, + name = 'follow' + ), ]
Update regex to match only UUIDs
## Code Before:
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"

from django.conf.urls import url, include

from . import views


app_name = 'jasmin_notifications'
urlpatterns = [
    url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
]

## Instruction:
Update regex to match only UUIDs

## Code After:
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"

from django.conf.urls import url, include

from . import views


app_name = 'jasmin_notifications'
urlpatterns = [
    url(
        r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
        views.follow,
        name = 'follow'
    ),
]

__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"

from django.conf.urls import url, include

from . import views


app_name = 'jasmin_notifications'
urlpatterns = [
-     url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
+     url(
+         r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
+         views.follow,
+         name = 'follow'
+     ),
]

92adf36a7aaf6d4741944b6c606f0cf4902f232d
letters/admin.py
letters/admin.py
from dal import autocomplete
from django import forms
from django.contrib import admin
from .models import Letter, Topic
from prosopography.models import Person


class PersonInlineForm(forms.ModelForm):
    class Meta:
        model = Person.letters_to.through
        fields = ('__all__')
        widgets = {
            'person': autocomplete.ModelSelect2(
                url='people:dal-autocomplete',
                attrs={
                    'data-placeholder': 'Type to search...',
                    'data-minimum-input-length': 2,
                }
            ),
        }


class PersonInline(admin.TabularInline):
    model = Person.letters_to.through
    form = PersonInlineForm


class LetterAdmin(admin.ModelAdmin):
    model = Letter
    inlines = [PersonInline]
    fields = ('book', 'letter', 'topics', 'date', 'citations')
    search_fields = ('book', 'letter', 'letters_to__nomina')
    list_filter = ('book',)
    filter_horizontal = ('citations',)


admin.site.register(Letter, LetterAdmin)
admin.site.register(Topic)

from dal import autocomplete
from django import forms
from django.contrib import admin
from letters.models import Letter, Topic
from prosopography.models import Person


class PersonInlineForm(forms.ModelForm):
    """Configure inline admin form for :class:`prosopography.models.Person` """
    class Meta:
        model = Person.letters_to.through
        fields = ('__all__')
        widgets = {
            'person': autocomplete.ModelSelect2(
                url='people:dal-autocomplete',
                attrs={
                    'data-placeholder': 'Type to search...',
                    'data-minimum-input-length': 2,
                }
            ),
        }


class PersonInline(admin.TabularInline):
    """:class:`prosopography.models.Person` admin inline for M2M."""
    model = Person.letters_to.through
    form = PersonInlineForm


class LetterAdmin(admin.ModelAdmin):
    """ModelAdmin for :class:`letters.models.Letter`"""
    model = Letter
    inlines = [PersonInline]
    fields = ('book', 'letter', 'topics', 'date', 'citations')
    search_fields = ('book', 'letter', 'letters_to__nomina')
    list_filter = ('book',)
    filter_horizontal = ('citations',)


admin.site.register(Letter, LetterAdmin)
admin.site.register(Topic)

Add some documentation to letters
Add some documentation to letters
Python
mit
bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject,bwhicks/PlinyProject
from dal import autocomplete
from django import forms
from django.contrib import admin
- from .models import Letter, Topic
+ from letters.models import Letter, Topic
from prosopography.models import Person


class PersonInlineForm(forms.ModelForm):
+     """Configure inline admin form for :class:`prosopography.models.Person` """
    class Meta:
        model = Person.letters_to.through
        fields = ('__all__')
        widgets = {
            'person': autocomplete.ModelSelect2(
                url='people:dal-autocomplete',
                attrs={
                    'data-placeholder': 'Type to search...',
                    'data-minimum-input-length': 2,
                }
            ),
        }


class PersonInline(admin.TabularInline):
+     """:class:`prosopography.models.Person` admin inline for M2M."""
    model = Person.letters_to.through
    form = PersonInlineForm


class LetterAdmin(admin.ModelAdmin):
+     """ModelAdmin for :class:`letters.models.Letter`"""
    model = Letter
    inlines = [PersonInline]
    fields = ('book', 'letter', 'topics', 'date', 'citations')
    search_fields = ('book', 'letter', 'letters_to__nomina')
    list_filter = ('book',)
    filter_horizontal = ('citations',)


admin.site.register(Letter, LetterAdmin)
admin.site.register(Topic)

Add some documentation to letters
## Code Before:
from dal import autocomplete
from django import forms
from django.contrib import admin
from .models import Letter, Topic
from prosopography.models import Person


class PersonInlineForm(forms.ModelForm):
    class Meta:
        model = Person.letters_to.through
        fields = ('__all__')
        widgets = {
            'person': autocomplete.ModelSelect2(
                url='people:dal-autocomplete',
                attrs={
                    'data-placeholder': 'Type to search...',
                    'data-minimum-input-length': 2,
                }
            ),
        }


class PersonInline(admin.TabularInline):
    model = Person.letters_to.through
    form = PersonInlineForm


class LetterAdmin(admin.ModelAdmin):
    model = Letter
    inlines = [PersonInline]
    fields = ('book', 'letter', 'topics', 'date', 'citations')
    search_fields = ('book', 'letter', 'letters_to__nomina')
    list_filter = ('book',)
    filter_horizontal = ('citations',)


admin.site.register(Letter, LetterAdmin)
admin.site.register(Topic)

## Instruction:
Add some documentation to letters

## Code After:
from dal import autocomplete
from django import forms
from django.contrib import admin
from letters.models import Letter, Topic
from prosopography.models import Person


class PersonInlineForm(forms.ModelForm):
    """Configure inline admin form for :class:`prosopography.models.Person` """
    class Meta:
        model = Person.letters_to.through
        fields = ('__all__')
        widgets = {
            'person': autocomplete.ModelSelect2(
                url='people:dal-autocomplete',
                attrs={
                    'data-placeholder': 'Type to search...',
                    'data-minimum-input-length': 2,
                }
            ),
        }


class PersonInline(admin.TabularInline):
    """:class:`prosopography.models.Person` admin inline for M2M."""
    model = Person.letters_to.through
    form = PersonInlineForm


class LetterAdmin(admin.ModelAdmin):
    """ModelAdmin for :class:`letters.models.Letter`"""
    model = Letter
    inlines = [PersonInline]
    fields = ('book', 'letter', 'topics', 'date', 'citations')
    search_fields = ('book', 'letter', 'letters_to__nomina')
    list_filter = ('book',)
    filter_horizontal = ('citations',)


admin.site.register(Letter, LetterAdmin)
admin.site.register(Topic)

from dal import autocomplete
from django import forms
from django.contrib import admin
- from .models import Letter, Topic
+ from letters.models import Letter, Topic
?      +++++++
from prosopography.models import Person


class PersonInlineForm(forms.ModelForm):
+     """Configure inline admin form for :class:`prosopography.models.Person` """
    class Meta:
        model = Person.letters_to.through
        fields = ('__all__')
        widgets = {
            'person': autocomplete.ModelSelect2(
                url='people:dal-autocomplete',
                attrs={
                    'data-placeholder': 'Type to search...',
                    'data-minimum-input-length': 2,
                }
            ),
        }


class PersonInline(admin.TabularInline):
+     """:class:`prosopography.models.Person` admin inline for M2M."""
    model = Person.letters_to.through
    form = PersonInlineForm


class LetterAdmin(admin.ModelAdmin):
+     """ModelAdmin for :class:`letters.models.Letter`"""
    model = Letter
    inlines = [PersonInline]
    fields = ('book', 'letter', 'topics', 'date', 'citations')
    search_fields = ('book', 'letter', 'letters_to__nomina')
    list_filter = ('book',)
    filter_horizontal = ('citations',)


admin.site.register(Letter, LetterAdmin)
admin.site.register(Topic)

5caa22112a11f2cabdacd8302536580012a2bf98
setup.py
setup.py
from distutils.core import setup
from pexpect import __version__

setup (name='pexpect',
    version=__version__,
    py_modules=['pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'],
    packages=['pexpect'],
    description='Pexpect allows easy control of interactive console applications.',
    author='Noah Spurrier; Thomas Kluyver; Jeff Quast',
    author_email='[email protected]; [email protected]; [email protected]',
    url='http://pexpect.readthedocs.org/',
    license='ISC license',
    platforms='UNIX',
    classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Intended Audience :: Quality Engineers',
        'License :: OSI Approved :: ISC License (ISCL)',
        'Operating System :: POSIX',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Software Development :: Testing',
        'Topic :: System, System :: Archiving :: Packaging, System :: Installation/Setup',
        'Topic :: System :: Shells',
        'Topic :: System :: Software Distribution',
        'Topic :: Terminals',
    ],
)

from distutils.core import setup
from pexpect import __version__

setup (name='pexpect',
    version=__version__,
    py_modules=['pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'],
    packages=['pexpect'],
    description='Pexpect allows easy control of interactive console applications.',
    author='Noah Spurrier; Thomas Kluyver; Jeff Quast',
    author_email='[email protected]; [email protected]; [email protected]',
    url='http://pexpect.readthedocs.org/',
    license='ISC license',
    platforms='UNIX',
    classifiers = [
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: ISC License (ISCL)',
        'Operating System :: POSIX',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Software Development :: Testing',
        'Topic :: System',
        'Topic :: System :: Archiving :: Packaging',
        'Topic :: System :: Installation/Setup',
        'Topic :: System :: Shells',
        'Topic :: System :: Software Distribution',
        'Topic :: Terminals',
    ],
)

Fix Trove classifiers to allow PyPI upload
Fix Trove classifiers to allow PyPI upload
Python
isc
dongguangming/pexpect,crdoconnor/pexpect,nodish/pexpect,dongguangming/pexpect,blink1073/pexpect,Depado/pexpect,quatanium/pexpect,nodish/pexpect,bangi123/pexpect,bangi123/pexpect,Wakeupbuddy/pexpect,Wakeupbuddy/pexpect,nodish/pexpect,crdoconnor/pexpect,bangi123/pexpect,quatanium/pexpect,Depado/pexpect,crdoconnor/pexpect,Depado/pexpect,dongguangming/pexpect,Depado/pexpect,dongguangming/pexpect,bangi123/pexpect,quatanium/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect
from distutils.core import setup from pexpect import __version__ setup (name='pexpect', version=__version__, py_modules=['pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'], packages=['pexpect'], description='Pexpect allows easy control of interactive console applications.', author='Noah Spurrier; Thomas Kluyver; Jeff Quast', author_email='[email protected]; [email protected]; [email protected]', url='http://pexpect.readthedocs.org/', license='ISC license', platforms='UNIX', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', - 'Intended Audience :: Quality Engineers', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Software Development :: Testing', - 'Topic :: System, System :: Archiving :: Packaging, System :: Installation/Setup', + 'Topic :: System', + 'Topic :: System :: Archiving :: Packaging', + 'Topic :: System :: Installation/Setup', 'Topic :: System :: Shells', 'Topic :: System :: Software Distribution', 'Topic :: Terminals', ], )
Fix Trove classifiers to allow PyPI upload
## Code Before: from distutils.core import setup from pexpect import __version__ setup (name='pexpect', version=__version__, py_modules=['pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'], packages=['pexpect'], description='Pexpect allows easy control of interactive console applications.', author='Noah Spurrier; Thomas Kluyver; Jeff Quast', author_email='[email protected]; [email protected]; [email protected]', url='http://pexpect.readthedocs.org/', license='ISC license', platforms='UNIX', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Intended Audience :: Quality Engineers', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Software Development :: Testing', 'Topic :: System, System :: Archiving :: Packaging, System :: Installation/Setup', 'Topic :: System :: Shells', 'Topic :: System :: Software Distribution', 'Topic :: Terminals', ], ) ## Instruction: Fix Trove classifiers to allow PyPI upload ## Code After: from distutils.core import setup from pexpect import __version__ setup (name='pexpect', version=__version__, py_modules=['pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'], packages=['pexpect'], description='Pexpect allows easy control of interactive console applications.', author='Noah Spurrier; Thomas Kluyver; Jeff Quast', author_email='[email protected]; [email protected]; [email protected]', url='http://pexpect.readthedocs.org/', license='ISC license', platforms='UNIX', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Software Development :: Testing', 'Topic :: System', 'Topic :: System :: Archiving :: Packaging', 'Topic :: System :: Installation/Setup', 'Topic :: System :: Shells', 'Topic :: System :: Software Distribution', 'Topic :: Terminals', ], )
from distutils.core import setup from pexpect import __version__ setup (name='pexpect', version=__version__, py_modules=['pxssh', 'fdpexpect', 'FSM', 'screen', 'ANSI'], packages=['pexpect'], description='Pexpect allows easy control of interactive console applications.', author='Noah Spurrier; Thomas Kluyver; Jeff Quast', author_email='[email protected]; [email protected]; [email protected]', url='http://pexpect.readthedocs.org/', license='ISC license', platforms='UNIX', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', - 'Intended Audience :: Quality Engineers', 'License :: OSI Approved :: ISC License (ISCL)', 'Operating System :: POSIX', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Software Development :: Testing', - 'Topic :: System, System :: Archiving :: Packaging, System :: Installation/Setup', + 'Topic :: System', + 'Topic :: System :: Archiving :: Packaging', + 'Topic :: System :: Installation/Setup', 'Topic :: System :: Shells', 'Topic :: System :: Software Distribution', 'Topic :: Terminals', ], )
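PyPI validates classifiers against the canonical Trove list, which is why the comma-joined 'Topic :: System, ...' entry and the nonexistent 'Intended Audience :: Quality Engineers' blocked the upload. One way to catch such entries before uploading is to check against the PyPA-maintained `trove-classifiers` package (assumed installed; it is not part of this record):

```python
# Sketch: validate a setup.py-style classifier list against the canonical
# Trove set; requires `pip install trove-classifiers`.
from trove_classifiers import classifiers as canonical

candidates = [
    'Topic :: System :: Shells',                          # valid
    'Intended Audience :: Quality Engineers',             # not a real classifier
    'Topic :: System, System :: Archiving :: Packaging',  # comma-joined, invalid
]

for entry in candidates:
    if entry not in canonical:
        print('Not a valid Trove classifier:', entry)
```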
30259313a817f2d5f147dc37ebf5ebd2c2edf943
configurator/__init__.py
configurator/__init__.py
import os import subprocess def _get_version(version=None): # overwritten by setup.py if version is None: pkg_dir = os.path.dirname(__file__) src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir)) git_dir = os.path.join(src_dir, ".git") git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir, "describe", "--tags", "--dirty") with open(os.devnull, "w") as devnull: output = subprocess.check_output(git_args, stderr=devnull) version = output.decode("utf-8").strip() return version __version__ = _get_version()
import os import subprocess def _get_version(version=None): # overwritten by setup.py if version is None: pkg_dir = os.path.dirname(__file__) src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir)) git_dir = os.path.join(src_dir, ".git") git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir, "describe", "--tags", "--dirty") with open(os.devnull, "w") as devnull: output = subprocess.check_output(git_args) version = output.decode("utf-8").strip() return version __version__ = _get_version()
Disable redirecting git output in _get_version
Disable redirecting git output in _get_version
Python
apache-2.0
yasserglez/configurator,yasserglez/configurator
import os import subprocess def _get_version(version=None): # overwritten by setup.py if version is None: pkg_dir = os.path.dirname(__file__) src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir)) git_dir = os.path.join(src_dir, ".git") git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir, "describe", "--tags", "--dirty") with open(os.devnull, "w") as devnull: - output = subprocess.check_output(git_args, stderr=devnull) + output = subprocess.check_output(git_args) version = output.decode("utf-8").strip() return version __version__ = _get_version()
Disable redirecting git output in _get_version
## Code Before: import os import subprocess def _get_version(version=None): # overwritten by setup.py if version is None: pkg_dir = os.path.dirname(__file__) src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir)) git_dir = os.path.join(src_dir, ".git") git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir, "describe", "--tags", "--dirty") with open(os.devnull, "w") as devnull: output = subprocess.check_output(git_args, stderr=devnull) version = output.decode("utf-8").strip() return version __version__ = _get_version() ## Instruction: Disable redirecting git output in _get_version ## Code After: import os import subprocess def _get_version(version=None): # overwritten by setup.py if version is None: pkg_dir = os.path.dirname(__file__) src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir)) git_dir = os.path.join(src_dir, ".git") git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir, "describe", "--tags", "--dirty") with open(os.devnull, "w") as devnull: output = subprocess.check_output(git_args) version = output.decode("utf-8").strip() return version __version__ = _get_version()
import os import subprocess def _get_version(version=None): # overwritten by setup.py if version is None: pkg_dir = os.path.dirname(__file__) src_dir = os.path.abspath(os.path.join(pkg_dir, os.pardir)) git_dir = os.path.join(src_dir, ".git") git_args = ("git", "--work-tree", src_dir, "--git-dir", git_dir, "describe", "--tags", "--dirty") with open(os.devnull, "w") as devnull: - output = subprocess.check_output(git_args, stderr=devnull) ? ---------------- + output = subprocess.check_output(git_args) version = output.decode("utf-8").strip() return version __version__ = _get_version()
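Dropping `stderr=devnull` means git's diagnostics now reach the terminal when `describe` fails, while a non-zero exit still raises `CalledProcessError`; note the new version still opens `os.devnull` even though the handle is no longer used. A sketch of the resulting behaviour (the command is illustrative):

```python
# Sketch: with no stderr redirection, the child's error text passes through
# to our stderr, and a non-zero exit still raises CalledProcessError.
import subprocess

try:
    out = subprocess.check_output(("git", "describe", "--tags", "--dirty"))
    print(out.decode("utf-8").strip())
except subprocess.CalledProcessError as exc:
    # git's own message was already written to stderr
    print("git describe failed with exit code", exc.returncode)
```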
794596fd6f55806eecca1c54e155533590108eee
openspending/lib/unicode_dict_reader.py
openspending/lib/unicode_dict_reader.py
import csv class EmptyCSVError(Exception): pass class UnicodeDictReader(object): def __init__(self, file_or_str, encoding='utf8', **kwargs): self.encoding = encoding self.reader = csv.DictReader(file_or_str, **kwargs) if not self.reader.fieldnames: raise EmptyCSVError("No fieldnames in CSV reader: empty file?") self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames) def __iter__(self): return (self._decode_row(row) for row in self.reader) def _decode_row(self, row): return dict( (self.keymap[k], self._decode_str(v)) for k, v in row.iteritems() ) def _decode_str(self, s): if s is None: return None return s.decode(self.encoding)
import csv class EmptyCSVError(Exception): pass class UnicodeDictReader(object): def __init__(self, fp, encoding='utf8', **kwargs): self.encoding = encoding self.reader = csv.DictReader(fp, **kwargs) if not self.reader.fieldnames: raise EmptyCSVError("No fieldnames in CSV reader: empty file?") self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames) def __iter__(self): return (self._decode_row(row) for row in self.reader) def _decode_row(self, row): return dict( (self.keymap[k], self._decode_str(v)) for k, v in row.iteritems() ) def _decode_str(self, s): if s is None: return None return s.decode(self.encoding)
Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader
Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader
Python
agpl-3.0
CivicVision/datahub,nathanhilbert/FPA_Core,USStateDept/FPA_Core,johnjohndoe/spendb,openspending/spendb,spendb/spendb,CivicVision/datahub,pudo/spendb,CivicVision/datahub,nathanhilbert/FPA_Core,pudo/spendb,openspending/spendb,johnjohndoe/spendb,spendb/spendb,nathanhilbert/FPA_Core,openspending/spendb,johnjohndoe/spendb,spendb/spendb,USStateDept/FPA_Core,pudo/spendb,USStateDept/FPA_Core
import csv class EmptyCSVError(Exception): pass class UnicodeDictReader(object): - def __init__(self, file_or_str, encoding='utf8', **kwargs): + def __init__(self, fp, encoding='utf8', **kwargs): self.encoding = encoding - self.reader = csv.DictReader(file_or_str, **kwargs) + self.reader = csv.DictReader(fp, **kwargs) if not self.reader.fieldnames: raise EmptyCSVError("No fieldnames in CSV reader: empty file?") self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames) def __iter__(self): return (self._decode_row(row) for row in self.reader) def _decode_row(self, row): return dict( (self.keymap[k], self._decode_str(v)) for k, v in row.iteritems() ) def _decode_str(self, s): if s is None: return None return s.decode(self.encoding)
Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader
## Code Before: import csv class EmptyCSVError(Exception): pass class UnicodeDictReader(object): def __init__(self, file_or_str, encoding='utf8', **kwargs): self.encoding = encoding self.reader = csv.DictReader(file_or_str, **kwargs) if not self.reader.fieldnames: raise EmptyCSVError("No fieldnames in CSV reader: empty file?") self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames) def __iter__(self): return (self._decode_row(row) for row in self.reader) def _decode_row(self, row): return dict( (self.keymap[k], self._decode_str(v)) for k, v in row.iteritems() ) def _decode_str(self, s): if s is None: return None return s.decode(self.encoding) ## Instruction: Rename misleading parameter name: UnicodeDictReader should have the same interface as csv.DictReader ## Code After: import csv class EmptyCSVError(Exception): pass class UnicodeDictReader(object): def __init__(self, fp, encoding='utf8', **kwargs): self.encoding = encoding self.reader = csv.DictReader(fp, **kwargs) if not self.reader.fieldnames: raise EmptyCSVError("No fieldnames in CSV reader: empty file?") self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames) def __iter__(self): return (self._decode_row(row) for row in self.reader) def _decode_row(self, row): return dict( (self.keymap[k], self._decode_str(v)) for k, v in row.iteritems() ) def _decode_str(self, s): if s is None: return None return s.decode(self.encoding)
import csv class EmptyCSVError(Exception): pass class UnicodeDictReader(object): - def __init__(self, file_or_str, encoding='utf8', **kwargs): ? ^^^^^^^^^^ + def __init__(self, fp, encoding='utf8', **kwargs): ? ^ self.encoding = encoding - self.reader = csv.DictReader(file_or_str, **kwargs) ? ^^^^^^^^^^ + self.reader = csv.DictReader(fp, **kwargs) ? ^ if not self.reader.fieldnames: raise EmptyCSVError("No fieldnames in CSV reader: empty file?") self.keymap = dict((k, k.decode(encoding)) for k in self.reader.fieldnames) def __iter__(self): return (self._decode_row(row) for row in self.reader) def _decode_row(self, row): return dict( (self.keymap[k], self._decode_str(v)) for k, v in row.iteritems() ) def _decode_str(self, s): if s is None: return None return s.decode(self.encoding)
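With the parameter renamed to `fp`, the wrapper reads as a drop-in for `csv.DictReader`: hand it any file-like object of encoded rows and get unicode keys and values back. A Python 2-style usage sketch (the code above uses `iteritems`, so it targets Python 2; the sample data is invented):

```python
# Python 2-style sketch: iterate a UTF-8 CSV through the wrapper and get
# unicode keys/values, mirroring csv.DictReader's file-object interface.
from __future__ import print_function

from io import BytesIO

from openspending.lib.unicode_dict_reader import UnicodeDictReader

fp = BytesIO(b"name,city\nZo\xc3\xab,K\xc3\xb6ln\n")
for row in UnicodeDictReader(fp, encoding='utf8'):
    print(row[u'name'], row[u'city'])   # Zoë Köln
```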
24e2e391dece37c245d8a459456f3e30cd2346a8
openxc/vehicle.py
openxc/vehicle.py
from .measurements import Measurement from .sinks.base import MeasurementNotifierSink class Vehicle(object): def __init__(self, source=None): self.sources = set() self.sinks = set() self.measurements = {} self.add_source(source) self.notifier = MeasurementNotifierSink() self.sinks.add(self.notifier) def get(self, measurement_class): name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name) def listen(self, measurement_class, listener): self.notifier.register(measurement_class, listener) def unlisten(self, measurement_class, listener): self.notifier.unregister(measurement_class, listener) def _receive(self, message, **kwargs): name = message['name'] self.measurements[name] = message for sink in self.sinks: sink.receive(message, **kwargs) def _construct_measurement(self, measurement_id): raw_measurement = self.measurements.get(measurement_id, None) if raw_measurement is not None: return Measurement.from_dict(raw_measurement) def add_source(self, source): if source is not None: self.sources.add(source) source.callback = self._receive source.start() def add_sink(self, sink): if sink is not None: self.sinks.add(sink) if hasattr(sink, 'start'): sink.start()
from .measurements import Measurement from .sinks.base import MeasurementNotifierSink class Vehicle(object): def __init__(self, interface=None): self.sources = set() self.sinks = set() self.measurements = {} if interface is not None: self.add_source(interface) self.controller = interface self.notifier = MeasurementNotifierSink() self.sinks.add(self.notifier) def get(self, measurement_class): name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name) def listen(self, measurement_class, listener): self.notifier.register(measurement_class, listener) def unlisten(self, measurement_class, listener): self.notifier.unregister(measurement_class, listener) def _receive(self, message, **kwargs): name = message['name'] self.measurements[name] = message for sink in self.sinks: sink.receive(message, **kwargs) def _construct_measurement(self, measurement_id): raw_measurement = self.measurements.get(measurement_id, None) if raw_measurement is not None: return Measurement.from_dict(raw_measurement) def add_source(self, source): if source is not None: self.sources.add(source) source.callback = self._receive source.start() def add_sink(self, sink): if sink is not None: self.sinks.add(sink) if hasattr(sink, 'start'): sink.start()
Change constructor of Vehicle to accept an Interface instead of just Source.
Change constructor of Vehicle to accept an Interface instead of just Source.
Python
bsd-3-clause
openxc/openxc-python,openxc/openxc-python,openxc/openxc-python
from .measurements import Measurement from .sinks.base import MeasurementNotifierSink class Vehicle(object): - def __init__(self, source=None): + def __init__(self, interface=None): self.sources = set() self.sinks = set() self.measurements = {} + + if interface is not None: - self.add_source(source) + self.add_source(interface) + self.controller = interface self.notifier = MeasurementNotifierSink() self.sinks.add(self.notifier) def get(self, measurement_class): name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name) def listen(self, measurement_class, listener): self.notifier.register(measurement_class, listener) def unlisten(self, measurement_class, listener): self.notifier.unregister(measurement_class, listener) def _receive(self, message, **kwargs): name = message['name'] self.measurements[name] = message for sink in self.sinks: sink.receive(message, **kwargs) def _construct_measurement(self, measurement_id): raw_measurement = self.measurements.get(measurement_id, None) if raw_measurement is not None: return Measurement.from_dict(raw_measurement) def add_source(self, source): if source is not None: self.sources.add(source) source.callback = self._receive source.start() def add_sink(self, sink): if sink is not None: self.sinks.add(sink) if hasattr(sink, 'start'): sink.start()
Change constructor of Vehicle to accept an Interface instead of just Source.
## Code Before: from .measurements import Measurement from .sinks.base import MeasurementNotifierSink class Vehicle(object): def __init__(self, source=None): self.sources = set() self.sinks = set() self.measurements = {} self.add_source(source) self.notifier = MeasurementNotifierSink() self.sinks.add(self.notifier) def get(self, measurement_class): name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name) def listen(self, measurement_class, listener): self.notifier.register(measurement_class, listener) def unlisten(self, measurement_class, listener): self.notifier.unregister(measurement_class, listener) def _receive(self, message, **kwargs): name = message['name'] self.measurements[name] = message for sink in self.sinks: sink.receive(message, **kwargs) def _construct_measurement(self, measurement_id): raw_measurement = self.measurements.get(measurement_id, None) if raw_measurement is not None: return Measurement.from_dict(raw_measurement) def add_source(self, source): if source is not None: self.sources.add(source) source.callback = self._receive source.start() def add_sink(self, sink): if sink is not None: self.sinks.add(sink) if hasattr(sink, 'start'): sink.start() ## Instruction: Change constructor of Vehicle to accept an Interface instead of just Source. ## Code After: from .measurements import Measurement from .sinks.base import MeasurementNotifierSink class Vehicle(object): def __init__(self, interface=None): self.sources = set() self.sinks = set() self.measurements = {} if interface is not None: self.add_source(interface) self.controller = interface self.notifier = MeasurementNotifierSink() self.sinks.add(self.notifier) def get(self, measurement_class): name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name) def listen(self, measurement_class, listener): self.notifier.register(measurement_class, listener) def unlisten(self, measurement_class, listener): self.notifier.unregister(measurement_class, listener) def _receive(self, message, **kwargs): name = message['name'] self.measurements[name] = message for sink in self.sinks: sink.receive(message, **kwargs) def _construct_measurement(self, measurement_id): raw_measurement = self.measurements.get(measurement_id, None) if raw_measurement is not None: return Measurement.from_dict(raw_measurement) def add_source(self, source): if source is not None: self.sources.add(source) source.callback = self._receive source.start() def add_sink(self, sink): if sink is not None: self.sinks.add(sink) if hasattr(sink, 'start'): sink.start()
from .measurements import Measurement from .sinks.base import MeasurementNotifierSink class Vehicle(object): - def __init__(self, source=None): ? ^^^ + def __init__(self, interface=None): ? ^^^^ ++ self.sources = set() self.sinks = set() self.measurements = {} + + if interface is not None: - self.add_source(source) ? ^^^ + self.add_source(interface) ? ++++ ^^^^ ++ + self.controller = interface self.notifier = MeasurementNotifierSink() self.sinks.add(self.notifier) def get(self, measurement_class): name = Measurement.name_from_class(measurement_class) return self._construct_measurement(name) def listen(self, measurement_class, listener): self.notifier.register(measurement_class, listener) def unlisten(self, measurement_class, listener): self.notifier.unregister(measurement_class, listener) def _receive(self, message, **kwargs): name = message['name'] self.measurements[name] = message for sink in self.sinks: sink.receive(message, **kwargs) def _construct_measurement(self, measurement_id): raw_measurement = self.measurements.get(measurement_id, None) if raw_measurement is not None: return Measurement.from_dict(raw_measurement) def add_source(self, source): if source is not None: self.sources.add(source) source.callback = self._receive source.start() def add_sink(self, sink): if sink is not None: self.sinks.add(sink) if hasattr(sink, 'start'): sink.start()
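`Vehicle` now expects an object satisfying the source contract that `add_source` relies on, a writable `callback` attribute plus a `start()` method, and additionally keeps it as `self.controller` for writes. A minimal stand-in showing that duck-typed contract (the `FakeInterface` is invented; real code would pass one of openxc's interface classes, and the real notifier sink may expect richer messages):

```python
# Sketch: the minimal duck-typed contract Vehicle expects of `interface`.
from openxc.vehicle import Vehicle


class FakeInterface(object):
    def __init__(self):
        self.callback = None          # Vehicle.add_source assigns _receive here

    def start(self):
        # A real interface would start reading the vehicle bus; this one
        # pushes a single canned measurement through the callback.
        self.callback({'name': 'vehicle_speed', 'value': 42})


vehicle = Vehicle(interface=FakeInterface())
print(vehicle.measurements)           # {'vehicle_speed': {... 'value': 42}}
print(vehicle.controller)             # the same FakeInterface instance
```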
6784c455cf93c16237661d6d9fed6af06726a880
conveyor/processor.py
conveyor/processor.py
from __future__ import absolute_import from __future__ import division import collections from xmlrpc2 import client as xmlrpc2 class BaseProcessor(object): def __init__(self, index, *args, **kwargs): super(BaseProcessor, self).__init__(*args, **kwargs) self.index = index self.client = xmlrpc2.Client(self.index) def process(self): raise NotImplementedError def get_releases(self, name, version=None): if version is None: versions = self.client.package_releases(name, True) else: versions = [version] for version in versions: item = self.client.release_data(name, version) url = self.client.release_urls(item["name"], item["version"]) if isinstance(url, collections.Mapping): urls = [url] elif isinstance(url, collections.Iterable): urls = url else: raise RuntimeError("Do not understand the type returned by release_urls") item.update({"files": urls}) yield item class BulkProcessor(BaseProcessor): def process(self): pass
from __future__ import absolute_import from __future__ import division import collections import slumber import slumber.exceptions import xmlrpc2.client class BaseProcessor(object): def __init__(self, index, warehouse, *args, **kwargs): super(BaseProcessor, self).__init__(*args, **kwargs) wargs, wkwargs = warehouse self.client = xmlrpc2.client.Client(index) self.warehouse = slumber.API(*wargs, **wkwargs) def process(self): raise NotImplementedError def get_releases(self, name, version=None): if version is None: versions = self.client.package_releases(name, True) else: versions = [version] for version in versions: item = self.client.release_data(name, version) url = self.client.release_urls(item["name"], item["version"]) if isinstance(url, collections.Mapping): urls = [url] elif isinstance(url, collections.Iterable): urls = url else: raise RuntimeError("Do not understand the type returned by release_urls") item.update({"files": urls}) yield item class BulkProcessor(BaseProcessor): def process(self): pass
Switch to more obvious imports
Switch to more obvious imports
Python
bsd-2-clause
crateio/carrier
from __future__ import absolute_import from __future__ import division import collections - from xmlrpc2 import client as xmlrpc2 + import slumber + import slumber.exceptions + import xmlrpc2.client class BaseProcessor(object): - def __init__(self, index, *args, **kwargs): + def __init__(self, index, warehouse, *args, **kwargs): super(BaseProcessor, self).__init__(*args, **kwargs) - self.index = index + wargs, wkwargs = warehouse + - self.client = xmlrpc2.Client(self.index) + self.client = xmlrpc2.client.Client(index) + self.warehouse = slumber.API(*wargs, **wkwargs) def process(self): raise NotImplementedError def get_releases(self, name, version=None): if version is None: versions = self.client.package_releases(name, True) else: versions = [version] for version in versions: item = self.client.release_data(name, version) url = self.client.release_urls(item["name"], item["version"]) if isinstance(url, collections.Mapping): urls = [url] elif isinstance(url, collections.Iterable): urls = url else: raise RuntimeError("Do not understand the type returned by release_urls") item.update({"files": urls}) yield item class BulkProcessor(BaseProcessor): def process(self): pass
Switch to more obvious imports
## Code Before: from __future__ import absolute_import from __future__ import division import collections from xmlrpc2 import client as xmlrpc2 class BaseProcessor(object): def __init__(self, index, *args, **kwargs): super(BaseProcessor, self).__init__(*args, **kwargs) self.index = index self.client = xmlrpc2.Client(self.index) def process(self): raise NotImplementedError def get_releases(self, name, version=None): if version is None: versions = self.client.package_releases(name, True) else: versions = [version] for version in versions: item = self.client.release_data(name, version) url = self.client.release_urls(item["name"], item["version"]) if isinstance(url, collections.Mapping): urls = [url] elif isinstance(url, collections.Iterable): urls = url else: raise RuntimeError("Do not understand the type returned by release_urls") item.update({"files": urls}) yield item class BulkProcessor(BaseProcessor): def process(self): pass ## Instruction: Switch to more obvious imports ## Code After: from __future__ import absolute_import from __future__ import division import collections import slumber import slumber.exceptions import xmlrpc2.client class BaseProcessor(object): def __init__(self, index, warehouse, *args, **kwargs): super(BaseProcessor, self).__init__(*args, **kwargs) wargs, wkwargs = warehouse self.client = xmlrpc2.client.Client(index) self.warehouse = slumber.API(*wargs, **wkwargs) def process(self): raise NotImplementedError def get_releases(self, name, version=None): if version is None: versions = self.client.package_releases(name, True) else: versions = [version] for version in versions: item = self.client.release_data(name, version) url = self.client.release_urls(item["name"], item["version"]) if isinstance(url, collections.Mapping): urls = [url] elif isinstance(url, collections.Iterable): urls = url else: raise RuntimeError("Do not understand the type returned by release_urls") item.update({"files": urls}) yield item class BulkProcessor(BaseProcessor): def process(self): pass
from __future__ import absolute_import from __future__ import division import collections - from xmlrpc2 import client as xmlrpc2 + import slumber + import slumber.exceptions + import xmlrpc2.client class BaseProcessor(object): - def __init__(self, index, *args, **kwargs): + def __init__(self, index, warehouse, *args, **kwargs): ? +++++++++++ super(BaseProcessor, self).__init__(*args, **kwargs) - self.index = index + wargs, wkwargs = warehouse + - self.client = xmlrpc2.Client(self.index) ? ----- + self.client = xmlrpc2.client.Client(index) ? +++++++ + self.warehouse = slumber.API(*wargs, **wkwargs) def process(self): raise NotImplementedError def get_releases(self, name, version=None): if version is None: versions = self.client.package_releases(name, True) else: versions = [version] for version in versions: item = self.client.release_data(name, version) url = self.client.release_urls(item["name"], item["version"]) if isinstance(url, collections.Mapping): urls = [url] elif isinstance(url, collections.Iterable): urls = url else: raise RuntimeError("Do not understand the type returned by release_urls") item.update({"files": urls}) yield item class BulkProcessor(BaseProcessor): def process(self): pass
9e7aed847c2d5fcd6e00bc787d8b3558b590f605
api/logs/urls.py
api/logs/urls.py
from django.conf.urls import url from api.logs import views urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), ]
from django.conf.urls import url from api.logs import views urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name), ]
Add /v2/logs/log_id/added_contributors/ to list of URLs.
Add /v2/logs/log_id/added_contributors/ to list of URLs.
Python
apache-2.0
abought/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,chennan47/osf.io,RomanZWang/osf.io,alexschiller/osf.io,billyhunt/osf.io,jnayak1/osf.io,RomanZWang/osf.io,emetsger/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,zachjanicki/osf.io,mattclark/osf.io,RomanZWang/osf.io,emetsger/osf.io,billyhunt/osf.io,RomanZWang/osf.io,crcresearch/osf.io,saradbowman/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,description='Pexpect,felliott/osf.io,mfraezz/osf.io,cslzchen/osf.io,crcresearch/osf.io,pudo/spendb,CivicVision/datahub,nathanhilbert/FPA_Core,USStateDept/FPA_Core,johnjohndoe/spendb,openspending/spendb,spendb/spendb,CivicVision/datahub,nathanhilbert/FPA_Core,pudo/spendb,openspending/spendb,johnjohndoe/spendb,spendb/spendb,nathanhilbert/FPA_Core,openspending/spendb,johnjohndoe/spendb,spendb/spendb,USStateDept/FPA_Core,pudo/spendb,USStateDept/FPA_Core,monikagrabowska/osf.io,laurenrevere/osf.io,acshi/osf.io,Johnetordoff/osf.io,acshi/osf.io,crcresearch/osf.io,cwisecarver/osf.io,binoculars/osf.io,brianjgeiger/osf.io,sloria/osf.io,zachjanicki/osf.io,baylee-d/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,icereval/osf.io,KAsante95/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,doublebits/osf.io,brandonPurvis/osf.io,chrisseto/osf.io,mattclark/osf.io,pattisdr/osf.io,baylee-d/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,mluke93/osf.io,leb2dg/osf.io,Nesiehr/osf.io,amyshi188/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,sloria/osf.io,kwierman/osf.io,samchrisinger/osf.io,doublebits/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,mluke93/osf.io,mfraezz/osf.io,saradbowman/osf.io,kch8qx/osf.io,KAsante95/osf.io,cwisecarver/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,chrisseto/osf.io,acshi/osf.io,amyshi188/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,mattclark/osf.io,cslzchen/osf.io,Nesiehr/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,acshi/osf.io,cwisecarver/osf.io,kwierman/osf.io,abought/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,felliott/osf.io,adlius/osf.io,felliott/osf.io,jnayak1/osf.io,binoculars/osf.io,DanielSBrown/osf.io,zamattiac/osf.io,billyhunt/osf.io,abought/osf.io,mluo613/osf.io,zamattiac/osf.io,GageGaskins/osf.io,mluo613/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,hmoco/osf.io,wearpants/osf.io,TomBaxter/osf.io,aaxelb/osf.io,alexschiller/osf.io,caseyrollins/osf.io,mfraezz/osf.io,doublebits/osf.io,zamattiac/osf.io,sloria/osf.io,pattisdr/osf.io,pattisdr/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,felliott/osf.io,monikagrabowska/osf.io,wearpants/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,adlius/osf.io,emetsger/osf.io,RomanZWang/osf.io,chrisseto/osf.io,kch8qx/osf.io,billyhunt/osf.io,chennan47/osf.io,kch8qx/osf.io,icereval/osf.io,TomHeatwole/osf.io,mluo613/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,kch8qx/osf.io
from django.conf.urls import url from api.logs import views urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), + url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name), ]
Add /v2/logs/log_id/added_contributors/ to list of URLs.
## Code Before:
from django.conf.urls import url

from api.logs import views

urlpatterns = [
    url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
    url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
]

## Instruction:
Add /v2/logs/log_id/added_contributors/ to list of URLs.

## Code After:
from django.conf.urls import url

from api.logs import views

urlpatterns = [
    url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name),
    url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name),
    url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name),
]
from django.conf.urls import url from api.logs import views urlpatterns = [ url(r'^(?P<log_id>\w+)/$', views.NodeLogDetail.as_view(), name=views.NodeLogDetail.view_name), url(r'^(?P<log_id>\w+)/nodes/$', views.LogNodeList.as_view(), name=views.LogNodeList.view_name), + url(r'^(?P<log_id>\w+)/added_contributors/$', views.NodeLogAddedContributors.as_view(), name=views.NodeLogAddedContributors.view_name), ]
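The added route reuses the `\w+` capture group for `log_id`; a quick pure-`re` check that the pattern matches the intended paths, independent of Django:

```python
# Sketch: the added pattern captures the log id, checked with plain re.
import re

pattern = re.compile(r'^(?P<log_id>\w+)/added_contributors/$')
match = pattern.match('abc123/added_contributors/')
print(match.group('log_id'))   # -> abc123
```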
c3838132d3a4622ab4c9660f574e8219ac5e164b
mysite/core/tasks.py
mysite/core/tasks.py
from intercom.client import Client from django.conf import settings from celery import shared_task intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN) @shared_task def intercom_event(event_name, created_at, email, metadata): intercom.events.create( event_name=event_name, created_at=created_at, email=email, metadata=metadata )
import logging from intercom.client import Client from django.conf import settings from celery import shared_task log = logging.getLogger(__name__) intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN) @shared_task def intercom_event(event_name, created_at, email, metadata): intercom.events.create( event_name=event_name, created_at=created_at, email=email, metadata=metadata ) log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
Add logging for Intercom event generation
Add logging for Intercom event generation
Python
apache-2.0
raccoongang/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2,cjlee112/socraticqs2,raccoongang/socraticqs2,cjlee112/socraticqs2
+ import logging + from intercom.client import Client from django.conf import settings from celery import shared_task + log = logging.getLogger(__name__) intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN) @shared_task - def intercom_event(event_name, created_at, email, metadata): + def intercom_event(event_name, created_at, email, metadata): intercom.events.create( event_name=event_name, created_at=created_at, email=email, metadata=metadata ) + log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
Add logging for Intercom event generation
## Code Before: from intercom.client import Client from django.conf import settings from celery import shared_task intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN) @shared_task def intercom_event(event_name, created_at, email, metadata): intercom.events.create( event_name=event_name, created_at=created_at, email=email, metadata=metadata ) ## Instruction: Add logging for Intercom event generation ## Code After: import logging from intercom.client import Client from django.conf import settings from celery import shared_task log = logging.getLogger(__name__) intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN) @shared_task def intercom_event(event_name, created_at, email, metadata): intercom.events.create( event_name=event_name, created_at=created_at, email=email, metadata=metadata ) log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
+ import logging + from intercom.client import Client from django.conf import settings from celery import shared_task + log = logging.getLogger(__name__) intercom = Client(personal_access_token=settings.INTERCOM_ACCESS_TOKEN) @shared_task - def intercom_event(event_name, created_at, email, metadata): + def intercom_event(event_name, created_at, email, metadata): ? ++++ intercom.events.create( event_name=event_name, created_at=created_at, email=email, metadata=metadata ) + log.info("{}:{}:{}:{}".format(event_name, created_at, email, metadata))
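Since `intercom_event` is a Celery `shared_task`, callers enqueue it with `.delay(...)`, and the new `log.info` call records each event once the Intercom request has been made. An invocation sketch with made-up event data:

```python
# Sketch: enqueueing the task; the event name and payload are invented.
import time

from mysite.core.tasks import intercom_event

intercom_event.delay(
    'course-enrolled',          # event_name
    int(time.time()),           # created_at, a unix timestamp
    '[email protected]',      # email
    {'course_id': 42},          # metadata
)
```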
27abcf86612e186f00cb9b91e604a222c9666438
app/eve_proxy/tasks.py
app/eve_proxy/tasks.py
from django.conf import settings import logging from datetime import datetime, timedelta from celery.task import task from eve_proxy.models import CachedDocument, ApiAccessLog @task(ignore_result=True) def clear_stale_cache(cache_extension=0): log = clear_stale_cache.get_logger() time = datetime.utcnow() - timedelta(seconds=cache_extension) objs = CachedDocument.objects.filter(cached_until__lt=time) log.info('Removing %s stale cache documents' % objs.count()) objs.delete() @task(ignore_result=True) def clear_old_logs(): log = clear_old_logs.get_logger() time = datetime.utcnow() - timedelta(days=settings.EVE_PROXY_KEEP_LOGS) objs = ApiAccessLog.objects.filter(time_access__lt=time) log.info('Removing %s old access logs' % objs.count()) objs.delete()
import logging from datetime import datetime, timedelta from django.conf import settings from django.utils.timezone import now from celery.task import task from eve_proxy.models import CachedDocument, ApiAccessLog @task(ignore_result=True) def clear_stale_cache(cache_extension=0): log = clear_stale_cache.get_logger() time = now() - timedelta(seconds=cache_extension) objs = CachedDocument.objects.filter(cached_until__lt=time) log.info('Removing %s stale cache documents' % objs.count()) objs.delete() @task(ignore_result=True) def clear_old_logs(): log = clear_old_logs.get_logger() time = now() - timedelta(days=getattr(settings, 'EVE_PROXY_KEEP_LOGS', 30)) objs = ApiAccessLog.objects.filter(time_access__lt=time) log.info('Removing %s old access logs' % objs.count()) objs.delete()
Update eve_proxy tasks for Django 1.4
Update eve_proxy tasks for Django 1.4
Python
bsd-3-clause
nikdoof/test-auth
- from django.conf import settings import logging from datetime import datetime, timedelta + + from django.conf import settings + from django.utils.timezone import now from celery.task import task from eve_proxy.models import CachedDocument, ApiAccessLog + @task(ignore_result=True) def clear_stale_cache(cache_extension=0): log = clear_stale_cache.get_logger() - time = datetime.utcnow() - timedelta(seconds=cache_extension) + time = now() - timedelta(seconds=cache_extension) objs = CachedDocument.objects.filter(cached_until__lt=time) log.info('Removing %s stale cache documents' % objs.count()) objs.delete() @task(ignore_result=True) def clear_old_logs(): log = clear_old_logs.get_logger() - time = datetime.utcnow() - timedelta(days=settings.EVE_PROXY_KEEP_LOGS) + time = now() - timedelta(days=getattr(settings, 'EVE_PROXY_KEEP_LOGS', 30)) objs = ApiAccessLog.objects.filter(time_access__lt=time) log.info('Removing %s old access logs' % objs.count()) objs.delete()
Update eve_proxy tasks for Django 1.4
## Code Before:
from django.conf import settings
import logging
from datetime import datetime, timedelta
from celery.task import task
from eve_proxy.models import CachedDocument, ApiAccessLog

@task(ignore_result=True)
def clear_stale_cache(cache_extension=0):
    log = clear_stale_cache.get_logger()
    time = datetime.utcnow() - timedelta(seconds=cache_extension)
    objs = CachedDocument.objects.filter(cached_until__lt=time)
    log.info('Removing %s stale cache documents' % objs.count())
    objs.delete()

@task(ignore_result=True)
def clear_old_logs():
    log = clear_old_logs.get_logger()
    time = datetime.utcnow() - timedelta(days=settings.EVE_PROXY_KEEP_LOGS)
    objs = ApiAccessLog.objects.filter(time_access__lt=time)
    log.info('Removing %s old access logs' % objs.count())
    objs.delete()

## Instruction:
Update eve_proxy tasks for Django 1.4

## Code After:
import logging
from datetime import datetime, timedelta

from django.conf import settings
from django.utils.timezone import now

from celery.task import task

from eve_proxy.models import CachedDocument, ApiAccessLog


@task(ignore_result=True)
def clear_stale_cache(cache_extension=0):
    log = clear_stale_cache.get_logger()
    time = now() - timedelta(seconds=cache_extension)
    objs = CachedDocument.objects.filter(cached_until__lt=time)
    log.info('Removing %s stale cache documents' % objs.count())
    objs.delete()

@task(ignore_result=True)
def clear_old_logs():
    log = clear_old_logs.get_logger()
    time = now() - timedelta(days=getattr(settings, 'EVE_PROXY_KEEP_LOGS', 30))
    objs = ApiAccessLog.objects.filter(time_access__lt=time)
    log.info('Removing %s old access logs' % objs.count())
    objs.delete()
- from django.conf import settings import logging from datetime import datetime, timedelta + + from django.conf import settings + from django.utils.timezone import now from celery.task import task from eve_proxy.models import CachedDocument, ApiAccessLog + @task(ignore_result=True) def clear_stale_cache(cache_extension=0): log = clear_stale_cache.get_logger() - time = datetime.utcnow() - timedelta(seconds=cache_extension) ? ------------ + time = now() - timedelta(seconds=cache_extension) objs = CachedDocument.objects.filter(cached_until__lt=time) log.info('Removing %s stale cache documents' % objs.count()) objs.delete() @task(ignore_result=True) def clear_old_logs(): log = clear_old_logs.get_logger() - time = datetime.utcnow() - timedelta(days=settings.EVE_PROXY_KEEP_LOGS) ? ------------ ^ + time = now() - timedelta(days=getattr(settings, 'EVE_PROXY_KEEP_LOGS', 30)) ? ++++++++ ^^^ +++++ + objs = ApiAccessLog.objects.filter(time_access__lt=time) log.info('Removing %s old access logs' % objs.count()) objs.delete()
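The move from `datetime.utcnow()` to `django.utils.timezone.now()` matters because Django 1.4's `USE_TZ` setting makes stored datetimes timezone-aware, and comparing aware values against naive ones fails; the change also makes `EVE_PROXY_KEEP_LOGS` optional with a 30-day default via `getattr`. A plain-Python illustration of the comparison failure (modern-Python `timezone` is used here only for the demonstration):

```python
# Sketch: aware and naive datetimes cannot be compared, which is what
# the old utcnow()-based filters would trip over under USE_TZ.
from datetime import datetime, timedelta, timezone

aware = datetime.now(timezone.utc)             # what timezone.now() yields
naive = datetime.utcnow() - timedelta(days=30)

try:
    naive < aware
except TypeError as exc:
    print(exc)   # can't compare offset-naive and offset-aware datetimes
```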
16dda42316176f0ad9c747731764855792fe88d6
lymph/utils/observables.py
lymph/utils/observables.py
class Observable(object): def __init__(self): self.observers = {} def notify_observers(self, action, *args, **kwargs): for callback in self.observers.get(action, ()): callback(*args, **kwargs) def observe(self, action, callback): self.observers.setdefault(action, []).append(callback)
class Observable(object): def __init__(self): self.observers = {} def notify_observers(self, action, *args, **kwargs): kwargs.setdefault('action', action) for callback in self.observers.get(action, ()): callback(*args, **kwargs) def observe(self, actions, callback): if not isinstance(actions, (tuple, list)): actions = (actions,) for action in actions: self.observers.setdefault(action, []).append(callback)
Allow observing more than one action at once
Allow observing more than one action at once
Python
apache-2.0
lyudmildrx/lymph,mouadino/lymph,Drahflow/lymph,itakouna/lymph,vpikulik/lymph,deliveryhero/lymph,kstrempel/lymph,alazaro/lymph,lyudmildrx/lymph,itakouna/lymph,mamachanko/lymph,torte/lymph,mamachanko/lymph,lyudmildrx/lymph,alazaro/lymph,mouadino/lymph,mamachanko/lymph,mouadino/lymph,alazaro/lymph,itakouna/lymph,dushyant88/lymph
+ class Observable(object): def __init__(self): self.observers = {} def notify_observers(self, action, *args, **kwargs): + kwargs.setdefault('action', action) for callback in self.observers.get(action, ()): callback(*args, **kwargs) - def observe(self, action, callback): + def observe(self, actions, callback): + if not isinstance(actions, (tuple, list)): + actions = (actions,) + for action in actions: - self.observers.setdefault(action, []).append(callback) + self.observers.setdefault(action, []).append(callback)
Allow observing more than one action at once
## Code Before: class Observable(object): def __init__(self): self.observers = {} def notify_observers(self, action, *args, **kwargs): for callback in self.observers.get(action, ()): callback(*args, **kwargs) def observe(self, action, callback): self.observers.setdefault(action, []).append(callback) ## Instruction: Allow observing more than one action at once ## Code After: class Observable(object): def __init__(self): self.observers = {} def notify_observers(self, action, *args, **kwargs): kwargs.setdefault('action', action) for callback in self.observers.get(action, ()): callback(*args, **kwargs) def observe(self, actions, callback): if not isinstance(actions, (tuple, list)): actions = (actions,) for action in actions: self.observers.setdefault(action, []).append(callback)
+ class Observable(object): def __init__(self): self.observers = {} def notify_observers(self, action, *args, **kwargs): + kwargs.setdefault('action', action) for callback in self.observers.get(action, ()): callback(*args, **kwargs) - def observe(self, action, callback): + def observe(self, actions, callback): ? + + if not isinstance(actions, (tuple, list)): + actions = (actions,) + for action in actions: - self.observers.setdefault(action, []).append(callback) + self.observers.setdefault(action, []).append(callback) ? ++++
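With the change, one callback can watch several actions, and `notify_observers` injects the triggering action name into the callback's kwargs via `setdefault`. A usage sketch:

```python
# Sketch: one handler bound to two actions; the action name arrives
# in kwargs because notify_observers setdefaults it.
from lymph.utils.observables import Observable

def handler(*args, **kwargs):
    print('fired for', kwargs['action'])

obs = Observable()
obs.observe(('created', 'deleted'), handler)
obs.notify_observers('created')   # fired for created
obs.notify_observers('deleted')   # fired for deleted
```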
9846559d9164216924e5f8bb1544148b3e6965b6
tensorflow_time_two/python/ops/time_two_ops_test.py
tensorflow_time_two/python/ops/time_two_ops_test.py
"""Tests for time_two ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.platform import test from time_two_ops import time_two class TimeTwoTest(test.TestCase): def testTimeTwo(self): with self.test_session(): self.assertAllClose( time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]])) if __name__ == '__main__': test.main()
"""Tests for time_two ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.platform import test try: from tensorflow_time_two.python.ops import time_two_ops except ImportError: import time_two_ops class TimeTwoTest(test.TestCase): def testTimeTwo(self): with self.test_session(): self.assertAllClose( time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]])) if __name__ == '__main__': test.main()
Make test work with make and bazel
Make test work with make and bazel
Python
apache-2.0
tensorflow/custom-op,tensorflow/custom-op,tensorflow/custom-op
"""Tests for time_two ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.platform import test - from time_two_ops import time_two + try: + from tensorflow_time_two.python.ops import time_two_ops + except ImportError: + import time_two_ops class TimeTwoTest(test.TestCase): def testTimeTwo(self): with self.test_session(): self.assertAllClose( - time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]])) + time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]])) if __name__ == '__main__': test.main()
Make test work with make and bazel
## Code Before:
"""Tests for time_two ops."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.platform import test
from time_two_ops import time_two


class TimeTwoTest(test.TestCase):

  def testTimeTwo(self):
    with self.test_session():
      self.assertAllClose(
          time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))


if __name__ == '__main__':
  test.main()

## Instruction:
Make test work with make and bazel

## Code After:
"""Tests for time_two ops."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.platform import test
try:
  from tensorflow_time_two.python.ops import time_two_ops
except ImportError:
  import time_two_ops


class TimeTwoTest(test.TestCase):

  def testTimeTwo(self):
    with self.test_session():
      self.assertAllClose(
          time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]]))


if __name__ == '__main__':
  test.main()
"""Tests for time_two ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.platform import test - from time_two_ops import time_two + try: + from tensorflow_time_two.python.ops import time_two_ops + except ImportError: + import time_two_ops class TimeTwoTest(test.TestCase): def testTimeTwo(self): with self.test_session(): self.assertAllClose( - time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]])) + time_two_ops.time_two([[1, 2], [3, 4]]).eval(), np.array([[2, 4], [6, 8]])) ? +++++++++++++ if __name__ == '__main__': test.main()
dcf2dcb41e66ce01e386d526370ce23064e6e2a3
schemer/exceptions.py
schemer/exceptions.py
class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format(path) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format('\"{}\"'.format(path)) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
Improve formatting of schema format exception messages
Improve formatting of schema format exception messages
Python
mit
gamechanger/schemer
class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): - self._message = message.format(path) + self._message = message.format('\"{}\"'.format(path)) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
Improve formatting of schema format exception messages
## Code Before: class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format(path) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors) ## Instruction: Improve formatting of schema format exception messages ## Code After: class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): self._message = message.format('\"{}\"'.format(path)) self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
class SchemaFormatException(Exception): """Exception which encapsulates a problem found during the verification of a a schema.""" def __init__(self, message, path): - self._message = message.format(path) + self._message = message.format('\"{}\"'.format(path)) ? ++++++++++++++++ + self._path = path @property def path(self): """The field path at which the format error was found.""" return self._path def __str__(self): return self._message class ValidationException(Exception): """Exception which is thrown in response to the failed validation of a document against it's associated schema.""" def __init__(self, errors): self._errors = errors @property def errors(self): """A dict containing the validation error(s) found at each field path.""" return self._errors def __str__(self): return repr(self._errors)
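The added `'\"{}\"'.format(path)` wraps the offending field path in double quotes before it is spliced into the message template, so errors read unambiguously even for nested paths. A small illustration (the message template here is invented):

```python
# Sketch: the path now renders quoted inside the message.
from schemer.exceptions import SchemaFormatException

exc = SchemaFormatException("Unsupported type declared at {}", "address.city")
print(str(exc))   # Unsupported type declared at "address.city"
```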
eac05dfe5c4190cc10b00d18aa9f03344eb3a6ea
fastats/core/single_pass.py
fastats/core/single_pass.py
import numpy as np from fastats.core.decorator import fs def value(x): # pragma: no cover return x @fs def single_pass(x): """ Performs a single iteration over the first dimension of `x`. Tests ----- >>> def square(x): ... return x * x >>> data = np.arange(10) >>> single_pass(data, value=square) array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> import math >>> def calc(x): ... return 2 * math.log(x) >>> single_pass(data[1:], value=calc) array([0, 1, 2, 2, 3, 3, 3, 4, 4]) """ result = np.zeros_like(x) for i in range(x.shape[0]): result[i] = value(x[i]) return result if __name__ == '__main__': import pytest pytest.main([__file__])
import numpy as np from fastats.core.decorator import fs def value(x): return x @fs def single_pass(x): """ Performs a single iteration over the first dimension of `x`. Tests ----- >>> def square(x): ... return x * x >>> data = np.arange(10) >>> single_pass(data, value=square) array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> import math >>> def calc(x): ... return 2 * math.log(x) >>> single_pass(data[1:], value=calc) array([0, 1, 2, 2, 3, 3, 3, 4, 4]) """ result = np.zeros_like(x) for i in range(x.shape[0]): result[i] = value(x[i]) return result if __name__ == '__main__': import pytest pytest.main([__file__])
Remove spurious no cover pragma
Remove spurious no cover pragma
Python
mit
dwillmer/fastats,fastats/fastats
import numpy as np from fastats.core.decorator import fs - def value(x): # pragma: no cover + def value(x): return x @fs def single_pass(x): """ Performs a single iteration over the first dimension of `x`. Tests ----- >>> def square(x): ... return x * x >>> data = np.arange(10) >>> single_pass(data, value=square) array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> import math >>> def calc(x): ... return 2 * math.log(x) >>> single_pass(data[1:], value=calc) array([0, 1, 2, 2, 3, 3, 3, 4, 4]) """ result = np.zeros_like(x) for i in range(x.shape[0]): result[i] = value(x[i]) return result if __name__ == '__main__': import pytest pytest.main([__file__])
Remove spurious no cover pragma
## Code Before: import numpy as np from fastats.core.decorator import fs def value(x): # pragma: no cover return x @fs def single_pass(x): """ Performs a single iteration over the first dimension of `x`. Tests ----- >>> def square(x): ... return x * x >>> data = np.arange(10) >>> single_pass(data, value=square) array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> import math >>> def calc(x): ... return 2 * math.log(x) >>> single_pass(data[1:], value=calc) array([0, 1, 2, 2, 3, 3, 3, 4, 4]) """ result = np.zeros_like(x) for i in range(x.shape[0]): result[i] = value(x[i]) return result if __name__ == '__main__': import pytest pytest.main([__file__]) ## Instruction: Remove spurious no cover pragma ## Code After: import numpy as np from fastats.core.decorator import fs def value(x): return x @fs def single_pass(x): """ Performs a single iteration over the first dimension of `x`. Tests ----- >>> def square(x): ... return x * x >>> data = np.arange(10) >>> single_pass(data, value=square) array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> import math >>> def calc(x): ... return 2 * math.log(x) >>> single_pass(data[1:], value=calc) array([0, 1, 2, 2, 3, 3, 3, 4, 4]) """ result = np.zeros_like(x) for i in range(x.shape[0]): result[i] = value(x[i]) return result if __name__ == '__main__': import pytest pytest.main([__file__])
import numpy as np from fastats.core.decorator import fs - def value(x): # pragma: no cover + def value(x): return x @fs def single_pass(x): """ Performs a single iteration over the first dimension of `x`. Tests ----- >>> def square(x): ... return x * x >>> data = np.arange(10) >>> single_pass(data, value=square) array([ 0, 1, 4, 9, 16, 25, 36, 49, 64, 81]) >>> import math >>> def calc(x): ... return 2 * math.log(x) >>> single_pass(data[1:], value=calc) array([0, 1, 2, 2, 3, 3, 3, 4, 4]) """ result = np.zeros_like(x) for i in range(x.shape[0]): result[i] = value(x[i]) return result if __name__ == '__main__': import pytest pytest.main([__file__])
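As a side note on the pragma itself, here is a hypothetical sketch (not from fastats) of a case where # pragma: no cover is genuinely warranted, in contrast to the value stub above, which the doctests do exercise:

import sys

def platform_banner():
    if sys.platform.startswith("win"):  # pragma: no cover
        # This branch never runs on a POSIX-only CI, so excluding it
        # from coverage measurement is legitimate.
        return "running on Windows"
    return "running on a POSIX system"

print(platform_banner())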
98f8a8fb51ae539aad6a3e2faebced4b806c3f0c
filer/utils/generate_filename.py
filer/utils/generate_filename.py
from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str) return os.path.join(random_path, get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
Build random path using os.path.join
Build random path using os.path.join
Python
bsd-3-clause
o-zander/django-filer,nimbis/django-filer,nimbis/django-filer,webu/django-filer,divio/django-filer,matthiask/django-filer,skirsdeda/django-filer,stefanfoulis/django-filer,sopraux/django-filer,Flight/django-filer,sopraux/django-filer,belimawr/django-filer,matthiask/django-filer,o-zander/django-filer,DylannCordel/django-filer,jakob-o/django-filer,lory87/django-filer,stefanfoulis/django-filer,o-zander/django-filer,skirsdeda/django-filer,nephila/django-filer,DylannCordel/django-filer,mkoistinen/django-filer,stefanfoulis/django-filer,DylannCordel/django-filer,skirsdeda/django-filer,divio/django-filer,vechorko/django-filer,nephila/django-filer,jakob-o/django-filer,nimbis/django-filer,jakob-o/django-filer,webu/django-filer,Flight/django-filer,belimawr/django-filer,civicresourcegroup/django-filer,kriwil/django-filer,belimawr/django-filer,civicresourcegroup/django-filer,mkoistinen/django-filer,mkoistinen/django-filer,Flight/django-filer,lory87/django-filer,kriwil/django-filer,stefanfoulis/django-filer,Flight/django-filer,vstoykov/django-filer,civicresourcegroup/django-filer,lory87/django-filer,Flight/django-filer,vstoykov/django-filer,skirsdeda/django-filer,kriwil/django-filer,divio/django-filer,webu/django-filer,nephila/django-filer,sopraux/django-filer,o-zander/django-filer,vechorko/django-filer,DylannCordel/django-filer,mkoistinen/django-filer,vstoykov/django-filer,jakob-o/django-filer,kriwil/django-filer,nimbis/django-filer,vechorko/django-filer,stefanfoulis/django-filer,belimawr/django-filer,skirsdeda/django-filer,webu/django-filer,matthiask/django-filer,lory87/django-filer,sopraux/django-filer,vechorko/django-filer,divio/django-filer,civicresourcegroup/django-filer,DylannCordel/django-filer,matthiask/django-filer,jakob-o/django-filer
from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) - random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str) - return os.path.join(random_path, get_valid_filename(filename)) + return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, + get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
Build random path using os.path.join
## Code Before: from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str) return os.path.join(random_path, get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str) ## Instruction: Build random path using os.path.join ## Code After: from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) - random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str) - return os.path.join(random_path, get_valid_filename(filename)) + return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, + get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
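A standalone sketch contrasting the two path-building styles in the commit above (output values are illustrative):

import os
import uuid

uuid_str = str(uuid.uuid4())

# String interpolation hard-codes "/" as the separator:
interpolated = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str)

# os.path.join uses os.sep, so the same code is portable:
joined = os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str)

print(interpolated)
print(joined)  # identical on POSIX; backslash-separated on Windows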
97cc8b9dd87d43e38d6ff2a20dc4cab2ffcc3d54
tests/test_requests.py
tests/test_requests.py
import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): contract = ibi.Stock('MMM', 'SMART', 'USD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): contract = ibi.Forex('EURUSD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
Use EURUSD for test order.
Use EURUSD for test order.
Python
bsd-2-clause
erdewit/ib_insync,erdewit/ib_insync
import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): - contract = ibi.Stock('MMM', 'SMART', 'USD') + contract = ibi.Forex('EURUSD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
Use EURUSD for test order.
## Code Before: import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): contract = ibi.Stock('MMM', 'SMART', 'USD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary) ## Instruction: Use EURUSD for test order. ## Code After: import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): contract = ibi.Forex('EURUSD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
import pytest import ib_insync as ibi pytestmark = pytest.mark.asyncio async def test_request_error_raised(ib): - contract = ibi.Stock('MMM', 'SMART', 'USD') + contract = ibi.Forex('EURUSD') order = ibi.MarketOrder('BUY', 100) orderState = await ib.whatIfOrderAsync(contract, order) assert orderState.commission > 0 ib.RaiseRequestErrors = True badContract = ibi.Stock('XXX') with pytest.raises(ibi.RequestError) as exc_info: await ib.whatIfOrderAsync(badContract, order) assert exc_info.value.code == 321 async def test_account_summary(ib): summary = await ib.accountSummaryAsync() assert summary assert all(isinstance(value, ibi.AccountValue) for value in summary)
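The error-code assertion in the test above follows a reusable pytest pattern; a self-contained toy version is sketched below (RequestError, what_if_order and the code 321 are stand-ins here, not the ib_insync API; run it with pytest):

import pytest

class RequestError(Exception):
    def __init__(self, code):
        super().__init__("request failed with code %d" % code)
        self.code = code

def what_if_order(symbol):
    if symbol == "XXX":  # toy stand-in for an unknown contract
        raise RequestError(321)
    return "ok"

def test_error_code_is_captured():
    # exc_info gives access to the raised exception after the block exits.
    with pytest.raises(RequestError) as exc_info:
        what_if_order("XXX")
    assert exc_info.value.code == 321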
2a7ce1ac70f8767e9d2b2a9f1d335cfcc63a92b6
rplugin/python3/LanguageClient/logger.py
rplugin/python3/LanguageClient/logger.py
import logging import tempfile logger = logging.getLogger("LanguageClient") with tempfile.NamedTemporaryFile( prefix="LanguageClient-", suffix=".log", delete=False) as tmp: tmpname = tmp.name fileHandler = logging.FileHandler(filename=tmpname) fileHandler.setFormatter( logging.Formatter( "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S")) logger.addHandler(fileHandler) logger.setLevel(logging.WARN)
import logging logger = logging.getLogger("LanguageClient") fileHandler = logging.FileHandler(filename="/tmp/LanguageClient.log") fileHandler.setFormatter( logging.Formatter( "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S")) logger.addHandler(fileHandler) logger.setLevel(logging.WARN)
Revert "Use tempfile lib for log file"
Revert "Use tempfile lib for log file" This reverts commit 6e8f35b83fc563c8349cb3be040c61a0588ca745. The commit caused severer issue than it fixed. In case one need to check the content of log file, there is no way to tell where the log file location/name is.
Python
mit
autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim
import logging - import tempfile logger = logging.getLogger("LanguageClient") - with tempfile.NamedTemporaryFile( - prefix="LanguageClient-", - suffix=".log", delete=False) as tmp: - tmpname = tmp.name - fileHandler = logging.FileHandler(filename=tmpname) + fileHandler = logging.FileHandler(filename="/tmp/LanguageClient.log") fileHandler.setFormatter( logging.Formatter( "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S")) logger.addHandler(fileHandler) logger.setLevel(logging.WARN)
Revert "Use tempfile lib for log file"
## Code Before: import logging import tempfile logger = logging.getLogger("LanguageClient") with tempfile.NamedTemporaryFile( prefix="LanguageClient-", suffix=".log", delete=False) as tmp: tmpname = tmp.name fileHandler = logging.FileHandler(filename=tmpname) fileHandler.setFormatter( logging.Formatter( "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S")) logger.addHandler(fileHandler) logger.setLevel(logging.WARN) ## Instruction: Revert "Use tempfile lib for log file" ## Code After: import logging logger = logging.getLogger("LanguageClient") fileHandler = logging.FileHandler(filename="/tmp/LanguageClient.log") fileHandler.setFormatter( logging.Formatter( "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S")) logger.addHandler(fileHandler) logger.setLevel(logging.WARN)
import logging - import tempfile logger = logging.getLogger("LanguageClient") - with tempfile.NamedTemporaryFile( - prefix="LanguageClient-", - suffix=".log", delete=False) as tmp: - tmpname = tmp.name - fileHandler = logging.FileHandler(filename=tmpname) ? ^ + fileHandler = logging.FileHandler(filename="/tmp/LanguageClient.log") ? ++ +++ ++ ^ +++++++++++ fileHandler.setFormatter( logging.Formatter( "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S")) logger.addHandler(fileHandler) logger.setLevel(logging.WARN)
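One possible middle ground between the two approaches, sketched under the assumption that an environment override is acceptable (the LANGUAGECLIENT_LOG variable is hypothetical, not something the plugin defines): keep a predictable default file name, but place it in the platform's temp directory and let the user redirect it.

import logging
import os
import tempfile

# Default to a predictable name in the system temp directory, but let
# the user override it, so the location is discoverable and configurable.
log_path = os.environ.get(
    "LANGUAGECLIENT_LOG",
    os.path.join(tempfile.gettempdir(), "LanguageClient.log"))

logger = logging.getLogger("LanguageClient")
handler = logging.FileHandler(filename=log_path)
handler.setFormatter(logging.Formatter(
    "%(asctime)s %(levelname)-8s %(message)s", "%H:%M:%S"))
logger.addHandler(handler)
logger.setLevel(logging.WARN)
logger.warning("log file lives at %s", log_path)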
aa7109d038a86f6a19a9fb4af96bd1199cd81330
functest/opnfv_tests/openstack/snaps/snaps_utils.py
functest/opnfv_tests/openstack/snaps/snaps_utils.py
from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ Returns the first external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
from functest.utils.constants import CONST from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ Returns the configured external network name or the first retrieved external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) if (hasattr(CONST, 'EXTERNAL_NETWORK')): extnet_config = CONST.__getattribute__('EXTERNAL_NETWORK') for ext_net in ext_nets: if ext_net.name == extnet_config: return extnet_config return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
Support to specify the valid external network name
Support to specify the valid external network name In some deployments, the external network retrieved by get_external_networks in SNAPS (identified via "router:external") is not available, so the user needs to be able to specify the available external network through an environment variable. Change-Id: I333e91dd106ed307541a9a197280199fde86bd30 Signed-off-by: Linda Wang <[email protected]>
Python
apache-2.0
opnfv/functest,mywulin/functest,opnfv/functest,mywulin/functest
+ + from functest.utils.constants import CONST from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ - Returns the first external network name + Returns the configured external network name or + the first retrieved external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) + if (hasattr(CONST, 'EXTERNAL_NETWORK')): + extnet_config = CONST.__getattribute__('EXTERNAL_NETWORK') + for ext_net in ext_nets: + if ext_net.name == extnet_config: + return extnet_config return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
Support to specify the valid external network name
## Code Before: from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ Returns the first external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes) ## Instruction: Support to specify the valid external network name ## Code After: from functest.utils.constants import CONST from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ Returns the configured external network name or the first retrieved external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) if (hasattr(CONST, 'EXTERNAL_NETWORK')): extnet_config = CONST.__getattribute__('EXTERNAL_NETWORK') for ext_net in ext_nets: if ext_net.name == extnet_config: return extnet_config return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
+ + from functest.utils.constants import CONST from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ - Returns the first external network name ? ^^ + Returns the configured external network name or ? +++ ++ ^^ +++ + the first retrieved external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) + if (hasattr(CONST, 'EXTERNAL_NETWORK')): + extnet_config = CONST.__getattribute__('EXTERNAL_NETWORK') + for ext_net in ext_nets: + if ext_net.name == extnet_config: + return extnet_config return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
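The hasattr/__getattribute__ pair in the new code can be expressed more idiomatically with getattr and a default; a self-contained sketch with a stub CONST follows (network names are hypothetical):

class _Const(object):
    EXTERNAL_NETWORK = "public"  # stand-in for functest's CONST

CONST = _Const()

def pick_ext_net_name(ext_net_names):
    # getattr with a default collapses the hasattr/__getattribute__ pair.
    configured = getattr(CONST, "EXTERNAL_NETWORK", None)
    if configured in ext_net_names:
        return configured
    return ext_net_names[0] if ext_net_names else ""

print(pick_ext_net_name(["public", "ext-net"]))  # -> public
print(pick_ext_net_name(["ext-net"]))            # -> ext-net
print(pick_ext_net_name([]))                     # -> ""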
52abe8ef49f77ce859cba0a9042ea5761fcbcd90
fusionpy/__init__.py
fusionpy/__init__.py
from __future__ import print_function __all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester'] class FusionError(IOError): def __init__(self, response, request_body=None, message=None, url=None): if message is None: message = "" if url is not None: message = "Requested " + url + "\n" if request_body is not None: message += request_body message += "Status %d\n\n%s" % (response.status, response.data) IOError.__init__(self, message) self.response = response self.url = url
from __future__ import print_function __all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester'] class FusionError(IOError): def __init__(self, response, request_body=None, message=None, url=None): """ :param response: The HTTP response, having attributes .body and .status (or str or unicode) :param request_body: The HTTP request body that percipitated this error :param message: Any text to go along with this :param url: The URL requested """ if response.__class__ is str or response.__class__ is unicode: if message is None: message = response else: message += response response = None if message is None: message = "" if url is not None: message = "Requested " + url + "\n" if request_body is not None: message += request_body if response is not None: message += "Status %d\n\n%s" % (response.status, response.data) IOError.__init__(self, message) self.response = response self.url = url
Deal with strings in the first param to the FusionError constructor
Deal with strings in the first param to the FusionError constructor
Python
mit
ke4roh/fusionpy
from __future__ import print_function __all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester'] - class FusionError(IOError): def __init__(self, response, request_body=None, message=None, url=None): + """ + :param response: The HTTP response, having attributes .body and .status (or str or unicode) + :param request_body: The HTTP request body that percipitated this error + :param message: Any text to go along with this + :param url: The URL requested + """ + if response.__class__ is str or response.__class__ is unicode: + if message is None: + message = response + else: + message += response + response = None if message is None: message = "" if url is not None: message = "Requested " + url + "\n" if request_body is not None: message += request_body + if response is not None: - message += "Status %d\n\n%s" % (response.status, response.data) + message += "Status %d\n\n%s" % (response.status, response.data) IOError.__init__(self, message) self.response = response self.url = url - -
Deal with strings in the first param to the FusionError constructor
## Code Before: from __future__ import print_function __all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester'] class FusionError(IOError): def __init__(self, response, request_body=None, message=None, url=None): if message is None: message = "" if url is not None: message = "Requested " + url + "\n" if request_body is not None: message += request_body message += "Status %d\n\n%s" % (response.status, response.data) IOError.__init__(self, message) self.response = response self.url = url ## Instruction: Deal with strings in the first param to the FusionError constructor ## Code After: from __future__ import print_function __all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester'] class FusionError(IOError): def __init__(self, response, request_body=None, message=None, url=None): """ :param response: The HTTP response, having attributes .body and .status (or str or unicode) :param request_body: The HTTP request body that percipitated this error :param message: Any text to go along with this :param url: The URL requested """ if response.__class__ is str or response.__class__ is unicode: if message is None: message = response else: message += response response = None if message is None: message = "" if url is not None: message = "Requested " + url + "\n" if request_body is not None: message += request_body if response is not None: message += "Status %d\n\n%s" % (response.status, response.data) IOError.__init__(self, message) self.response = response self.url = url
from __future__ import print_function __all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester'] - class FusionError(IOError): def __init__(self, response, request_body=None, message=None, url=None): + """ + :param response: The HTTP response, having attributes .body and .status (or str or unicode) + :param request_body: The HTTP request body that percipitated this error + :param message: Any text to go along with this + :param url: The URL requested + """ + if response.__class__ is str or response.__class__ is unicode: + if message is None: + message = response + else: + message += response + response = None if message is None: message = "" if url is not None: message = "Requested " + url + "\n" if request_body is not None: message += request_body + if response is not None: - message += "Status %d\n\n%s" % (response.status, response.data) + message += "Status %d\n\n%s" % (response.status, response.data) ? ++++ IOError.__init__(self, message) self.response = response self.url = url - -
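The type dispatch at the top of the broadened constructor boils down to the pattern below; this is a Python 3 simplification (the original targets Python 2, hence the unicode check) with a stand-in response object:

def describe(response):
    # Accept either a plain string or a response-like object, mirroring
    # the dispatch the broadened constructor performs.
    if isinstance(response, str):
        return response
    return "Status %d\n\n%s" % (response.status, response.data)

class _FakeResponse(object):
    status = 500
    data = "core not found"

print(describe("collection bootstrap failed"))
print(describe(_FakeResponse()))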
fc203d643aa9a69c835aebee0de9b17851ef7a58
compose/cli/docker_client.py
compose/cli/docker_client.py
from docker import Client from docker import tls import ssl import os def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)) return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout)
from docker import Client from docker import tls import ssl import os def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') api_version = os.environ.get('COMPOSE_API_VERSION', '1.18') tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)) return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
Allow API version specification via env var
Allow API version specification via env var Hard-coding the API version to '1.18' in the docker-py constructor makes the docker-py logic at https://github.com/docker/docker-py/blob/master/docker/client.py#L143-L146 always fail, causing authentication issues when talking to a remote daemon that speaks API version 1.19, regardless of the registry's API version. Allow the user to set the API version via an environment variable. If the variable is not present, it still defaults to '1.18' as it does today. Signed-off-by: Reilly Herrewig-Pope <[email protected]>
Python
apache-2.0
jeanpralo/compose,shubheksha/docker.github.io,saada/compose,talolard/compose,joaofnfernandes/docker.github.io,ionrock/compose,iamluc/compose,goloveychuk/compose,vdemeester/compose,menglingwei/denverdino.github.io,qzio/compose,d2bit/compose,docker/docker.github.io,GM-Alex/compose,bdwill/docker.github.io,anweiss/docker.github.io,KalleDK/compose,londoncalling/docker.github.io,jiekechoo/compose,londoncalling/docker.github.io,cgvarela/compose,cgvarela/compose,albers/compose,thaJeztah/compose,gdevillele/docker.github.io,bbirand/compose,au-phiware/compose,ph-One/compose,denverdino/docker.github.io,alexisbellido/docker.github.io,moxiegirl/compose,twitherspoon/compose,mchasal/compose,denverdino/docker.github.io,unodba/compose,vlajos/compose,lmesz/compose,mrfuxi/compose,troy0820/docker.github.io,mdaue/compose,LuisBosquez/docker.github.io,troy0820/docker.github.io,artemkaint/compose,gtrdotmcs/compose,joaofnfernandes/docker.github.io,denverdino/docker.github.io,phiroict/docker,jzwlqx/denverdino.github.io,simonista/compose,jrabbit/compose,docker/docker.github.io,rgbkrk/compose,ionrock/compose,TheDataShed/compose,jorgeLuizChaves/compose,menglingwei/denverdino.github.io,dbdd4us/compose,tiry/compose,shin-/docker.github.io,denverdino/denverdino.github.io,charleswhchan/compose,hoogenm/compose,alexisbellido/docker.github.io,KevinGreene/compose,artemkaint/compose,johnstep/docker.github.io,gdevillele/docker.github.io,danix800/docker.github.io,talolard/compose,d2bit/compose,ggtools/compose,kojiromike/compose,alexandrev/compose,jzwlqx/denverdino.github.io,jeanpralo/compose,funkyfuture/docker-compose,dopry/compose,mnuessler/compose,troy0820/docker.github.io,lmesz/compose,saada/compose,thaJeztah/docker.github.io,alexandrev/compose,sanscontext/docker.github.io,thaJeztah/docker.github.io,mbailey/compose,swoopla/compose,shubheksha/docker.github.io,shin-/compose,aanand/fig,TomasTomecek/compose,screwgoth/compose,joeuo/docker.github.io,dilgerma/compose,VinceBarresi/compose,LuisBosquez/docker.github.io,joaofnfernandes/docker.github.io,denverdino/denverdino.github.io,Dakno/compose,JimGalasyn/docker.github.io,bbirand/compose,TomasTomecek/compose,anweiss/docker.github.io,aanand/fig,browning/compose,mohitsoni/compose,shubheksha/docker.github.io,joaofnfernandes/docker.github.io,viranch/compose,thaJeztah/docker.github.io,BSWANG/denverdino.github.io,joaofnfernandes/docker.github.io,mohitsoni/compose,mnowster/compose,JimGalasyn/docker.github.io,bdwill/docker.github.io,alexisbellido/docker.github.io,BSWANG/denverdino.github.io,michael-k/docker-compose,jonaseck2/compose,simonista/compose,JimGalasyn/docker.github.io,andrewgee/compose,docker-zh/docker.github.io,joeuo/docker.github.io,unodba/compose,mrfuxi/compose,genki/compose,bdwill/docker.github.io,runcom/compose,docker-zh/docker.github.io,sdurrheimer/compose,JimGalasyn/docker.github.io,menglingwei/denverdino.github.io,aduermael/docker.github.io,thaJeztah/compose,andrewgee/compose,dbdd4us/compose,GM-Alex/compose,alunduil/fig,anweiss/docker.github.io,denverdino/denverdino.github.io,mchasal/compose,johnstep/docker.github.io,dnephin/compose,ggtools/compose,BSWANG/denverdino.github.io,aduermael/docker.github.io,browning/compose,bsmr-docker/compose,swoopla/compose,danix800/docker.github.io,mindaugasrukas/compose,jiekechoo/compose,rillig/docker.github.io,bdwill/docker.github.io,ZJaffee/compose,denverdino/denverdino.github.io,dilgerma/compose,docker/docker.github.io,docker-zh/docker.github.io,MSakamaki/compose,JimGalasyn/docker.github.io,denverdino/docker.github.io,danix800/docker.github.
io,j-fuentes/compose,londoncalling/docker.github.io,jzwlqx/denverdino.github.io,KevinGreene/compose,dopry/compose,vlajos/compose,jzwlqx/denverdino.github.io,menglingwei/denverdino.github.io,johnstep/docker.github.io,danix800/docker.github.io,tangkun75/compose,mbailey/compose,jonaseck2/compose,anweiss/docker.github.io,denverdino/compose,Dakno/compose,alunduil/fig,kikkomep/compose,londoncalling/docker.github.io,josephpage/compose,jrabbit/compose,xydinesh/compose,dockerhn/compose,docker/docker.github.io,viranch/compose,VinceBarresi/compose,denverdino/docker.github.io,londoncalling/docker.github.io,aduermael/docker.github.io,j-fuentes/compose,charleswhchan/compose,rgbkrk/compose,shin-/compose,denverdino/compose,sanscontext/docker.github.io,goloveychuk/compose,docker-zh/docker.github.io,anweiss/docker.github.io,mark-adams/compose,bsmr-docker/compose,bdwill/docker.github.io,kojiromike/compose,TheDataShed/compose,qzio/compose,mnowster/compose,screwgoth/compose,rillig/docker.github.io,mark-adams/compose,sanscontext/docker.github.io,au-phiware/compose,tangkun75/compose,shubheksha/docker.github.io,gdevillele/docker.github.io,thaJeztah/docker.github.io,nhumrich/compose,johnstep/docker.github.io,iamluc/compose,shin-/docker.github.io,runcom/compose,ChrisChinchilla/compose,moxiegirl/compose,alexisbellido/docker.github.io,jorgeLuizChaves/compose,genki/compose,tiry/compose,denverdino/denverdino.github.io,phiroict/docker,joeuo/docker.github.io,docker-zh/docker.github.io,rillig/docker.github.io,BSWANG/denverdino.github.io,phiroict/docker,phiroict/docker,ph-One/compose,ChrisChinchilla/compose,johnstep/docker.github.io,gdevillele/docker.github.io,vdemeester/compose,shin-/docker.github.io,shin-/docker.github.io,kikkomep/compose,mdaue/compose,dnephin/compose,aduermael/docker.github.io,shin-/docker.github.io,joeuo/docker.github.io,KalleDK/compose,joeuo/docker.github.io,LuisBosquez/docker.github.io,jzwlqx/denverdino.github.io,menglingwei/denverdino.github.io,docker/docker.github.io,funkyfuture/docker-compose,twitherspoon/compose,sanscontext/docker.github.io,alexisbellido/docker.github.io,gdevillele/docker.github.io,amitsaha/compose,schmunk42/compose,troy0820/docker.github.io,gtrdotmcs/compose,mindaugasrukas/compose,hoogenm/compose,mnuessler/compose,josephpage/compose,LuisBosquez/docker.github.io,sanscontext/docker.github.io,xydinesh/compose,rillig/docker.github.io,albers/compose,phiroict/docker,ZJaffee/compose,amitsaha/compose,shubheksha/docker.github.io,dockerhn/compose,nhumrich/compose,thaJeztah/docker.github.io,LuisBosquez/docker.github.io,michael-k/docker-compose,schmunk42/compose,sdurrheimer/compose,MSakamaki/compose,BSWANG/denverdino.github.io
from docker import Client from docker import tls import ssl import os def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') + api_version = os.environ.get('COMPOSE_API_VERSION', '1.18') + tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)) - return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout) + return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
Allow API version specification via env var
## Code Before: from docker import Client from docker import tls import ssl import os def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)) return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout) ## Instruction: Allow API version specification via env var ## Code After: from docker import Client from docker import tls import ssl import os def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') api_version = os.environ.get('COMPOSE_API_VERSION', '1.18') tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)) return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout)
from docker import Client from docker import tls import ssl import os def docker_client(): """ Returns a docker-py client configured using environment variables according to the same logic as the official Docker client. """ cert_path = os.environ.get('DOCKER_CERT_PATH', '') if cert_path == '': cert_path = os.path.join(os.environ.get('HOME', ''), '.docker') base_url = os.environ.get('DOCKER_HOST') + api_version = os.environ.get('COMPOSE_API_VERSION', '1.18') + tls_config = None if os.environ.get('DOCKER_TLS_VERIFY', '') != '': parts = base_url.split('://', 1) base_url = '%s://%s' % ('https', parts[1]) client_cert = (os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')) ca_cert = os.path.join(cert_path, 'ca.pem') tls_config = tls.TLSConfig( ssl_version=ssl.PROTOCOL_TLSv1, verify=True, assert_hostname=False, client_cert=client_cert, ca_cert=ca_cert, ) timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60)) - return Client(base_url=base_url, tls=tls_config, version='1.18', timeout=timeout) ? ^^^^^^ + return Client(base_url=base_url, tls=tls_config, version=api_version, timeout=timeout) ? ^^^^^^^^^^^
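The override itself is the usual optional-environment-variable pattern; a minimal sketch, with an example invocation shown as a comment (version values illustrative):

import os

# Same pattern as the commit: honour an override, fall back to a default.
api_version = os.environ.get("COMPOSE_API_VERSION", "1.18")
print(api_version)

# Example invocation against a newer daemon:
#   COMPOSE_API_VERSION=1.19 docker-compose up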
107b97e952d731f8c55c9ca3208ecd2a41512b8d
tests/integration/modules/sysmod.py
tests/integration/modules/sysmod.py
import integration class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' funcs = self.run_function('sys.list_functions') self.assertTrue('hosts.list_hosts' in funcs) self.assertTrue('pkg.install' in funcs) def test_list_modules(self): ''' sys.list_moduels ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest)
import integration class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' funcs = self.run_function('sys.list_functions') self.assertTrue('hosts.list_hosts' in funcs) self.assertTrue('pkg.install' in funcs) def test_list_modules(self): ''' sys.list_moduels ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) def test_valid_docs(self): ''' Make sure no functions are exposed that don't have valid docstrings ''' docs = self.run_function('sys.doc') bad = set() for fun in docs: if fun.startswith('runtests_helpers'): continue if not isinstance(docs[fun], basestring): bad.add(fun) elif not 'Example::' in docs[fun]: if not 'Examples::' in docs[fun]: bad.add(fun) if bad: import pprint pprint.pprint(sorted(bad)) self.assertFalse(bool(bad)) if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest)
Add test to verify loader modules
Add test to verify loader modules
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
import integration class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' funcs = self.run_function('sys.list_functions') self.assertTrue('hosts.list_hosts' in funcs) self.assertTrue('pkg.install' in funcs) def test_list_modules(self): ''' sys.list_moduels ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) + def test_valid_docs(self): + ''' + Make sure no functions are exposed that don't have valid docstrings + ''' + docs = self.run_function('sys.doc') + bad = set() + for fun in docs: + if fun.startswith('runtests_helpers'): + continue + if not isinstance(docs[fun], basestring): + bad.add(fun) + elif not 'Example::' in docs[fun]: + if not 'Examples::' in docs[fun]: + bad.add(fun) + if bad: + import pprint + pprint.pprint(sorted(bad)) + self.assertFalse(bool(bad)) + if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest)
Add test to verify loader modules
## Code Before: import integration class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' funcs = self.run_function('sys.list_functions') self.assertTrue('hosts.list_hosts' in funcs) self.assertTrue('pkg.install' in funcs) def test_list_modules(self): ''' sys.list_moduels ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest) ## Instruction: Add test to verify loader modules ## Code After: import integration class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' funcs = self.run_function('sys.list_functions') self.assertTrue('hosts.list_hosts' in funcs) self.assertTrue('pkg.install' in funcs) def test_list_modules(self): ''' sys.list_moduels ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) def test_valid_docs(self): ''' Make sure no functions are exposed that don't have valid docstrings ''' docs = self.run_function('sys.doc') bad = set() for fun in docs: if fun.startswith('runtests_helpers'): continue if not isinstance(docs[fun], basestring): bad.add(fun) elif not 'Example::' in docs[fun]: if not 'Examples::' in docs[fun]: bad.add(fun) if bad: import pprint pprint.pprint(sorted(bad)) self.assertFalse(bool(bad)) if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest)
import integration class SysModuleTest(integration.ModuleCase): ''' Validate the sys module ''' def test_list_functions(self): ''' sys.list_functions ''' funcs = self.run_function('sys.list_functions') self.assertTrue('hosts.list_hosts' in funcs) self.assertTrue('pkg.install' in funcs) def test_list_modules(self): ''' sys.list_moduels ''' mods = self.run_function('sys.list_modules') self.assertTrue('hosts' in mods) self.assertTrue('pkg' in mods) + def test_valid_docs(self): + ''' + Make sure no functions are exposed that don't have valid docstrings + ''' + docs = self.run_function('sys.doc') + bad = set() + for fun in docs: + if fun.startswith('runtests_helpers'): + continue + if not isinstance(docs[fun], basestring): + bad.add(fun) + elif not 'Example::' in docs[fun]: + if not 'Examples::' in docs[fun]: + bad.add(fun) + if bad: + import pprint + pprint.pprint(sorted(bad)) + self.assertFalse(bool(bad)) + if __name__ == '__main__': from integration import run_tests run_tests(SysModuleTest)
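The nested marker checks in the new test can be collapsed with any(); a self-contained Python 3 sketch (the original uses Python 2's basestring) with toy docstrings:

def has_example(doc):
    # Equivalent to the nested checks above: accept either marker.
    return isinstance(doc, str) and any(
        marker in doc for marker in ("Example::", "Examples::"))

docs = {
    "pkg.install": "Install a package.\n\nExample::\n\n    salt '*' pkg.install vim",
    "hosts.list_hosts": "List hosts.",  # missing an example section
    "helper.internal": None,            # no docstring at all
}

bad = sorted(fun for fun, doc in docs.items() if not has_example(doc))
print(bad)  # ['helper.internal', 'hosts.list_hosts']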
e43345616e5240274e852a722c0c72c07f988b2a
registration/__init__.py
registration/__init__.py
VERSION = (0, 9, 0, 'beta', 1) def get_version(): from django.utils.version import get_version as django_get_version return django_get_version(VERSION) # pragma: no cover
VERSION = (1, 0, 0, 'final', 0) def get_version(): "Returns a PEP 386-compliant version number from VERSION." assert len(VERSION) == 5 assert VERSION[3] in ('alpha', 'beta', 'rc', 'final') # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if VERSION[2] == 0 else 3 main = '.'.join(str(x) for x in VERSION[:parts]) sub = '' if VERSION[3] != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} sub = mapping[VERSION[3]] + str(VERSION[4]) return str(main + sub)
Fix version number reporting so we can be installed before Django.
Fix version number reporting so we can be installed before Django.
Python
bsd-3-clause
myimages/django-registration,Troyhy/django-registration,mypebble/djregs,akvo/django-registration,Troyhy/django-registration,hacklabr/django-registration,gone/django-registration,akvo/django-registration,tdruez/django-registration,dirtycoder/django-registration,sandipagr/django-registration,kennydude/djregs,danielsamuels/django-registration,awakeup/django-registration,ubernostrum/django-registration,sandipagr/django-registration,gone/django-registration,hacklabr/django-registration
- VERSION = (0, 9, 0, 'beta', 1) + VERSION = (1, 0, 0, 'final', 0) def get_version(): - from django.utils.version import get_version as django_get_version - return django_get_version(VERSION) # pragma: no cover + "Returns a PEP 386-compliant version number from VERSION." + assert len(VERSION) == 5 + assert VERSION[3] in ('alpha', 'beta', 'rc', 'final') + # Now build the two parts of the version number: + # main = X.Y[.Z] + # sub = .devN - for pre-alpha releases + # | {a|b|c}N - for alpha, beta and rc releases + + parts = 2 if VERSION[2] == 0 else 3 + main = '.'.join(str(x) for x in VERSION[:parts]) + + sub = '' + if VERSION[3] != 'final': + mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} + sub = mapping[VERSION[3]] + str(VERSION[4]) + + return str(main + sub) +
Fix version number reporting so we can be installed before Django.
## Code Before: VERSION = (0, 9, 0, 'beta', 1) def get_version(): from django.utils.version import get_version as django_get_version return django_get_version(VERSION) # pragma: no cover ## Instruction: Fix version number reporting so we can be installed before Django. ## Code After: VERSION = (1, 0, 0, 'final', 0) def get_version(): "Returns a PEP 386-compliant version number from VERSION." assert len(VERSION) == 5 assert VERSION[3] in ('alpha', 'beta', 'rc', 'final') # Now build the two parts of the version number: # main = X.Y[.Z] # sub = .devN - for pre-alpha releases # | {a|b|c}N - for alpha, beta and rc releases parts = 2 if VERSION[2] == 0 else 3 main = '.'.join(str(x) for x in VERSION[:parts]) sub = '' if VERSION[3] != 'final': mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} sub = mapping[VERSION[3]] + str(VERSION[4]) return str(main + sub)
- VERSION = (0, 9, 0, 'beta', 1) ? ^ ^ ^^^ ^ + VERSION = (1, 0, 0, 'final', 0) ? ^ ^ ^^^ + ^ def get_version(): - from django.utils.version import get_version as django_get_version - return django_get_version(VERSION) # pragma: no cover + "Returns a PEP 386-compliant version number from VERSION." + assert len(VERSION) == 5 + assert VERSION[3] in ('alpha', 'beta', 'rc', 'final') + + # Now build the two parts of the version number: + # main = X.Y[.Z] + # sub = .devN - for pre-alpha releases + # | {a|b|c}N - for alpha, beta and rc releases + + parts = 2 if VERSION[2] == 0 else 3 + main = '.'.join(str(x) for x in VERSION[:parts]) + + sub = '' + if VERSION[3] != 'final': + mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} + sub = mapping[VERSION[3]] + str(VERSION[4]) + + return str(main + sub)
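A worked illustration of what the version builder produces; the function below copies the committed logic so the outputs can be shown standalone (the version tuples are illustrative):

def build_version(version):
    assert len(version) == 5
    assert version[3] in ('alpha', 'beta', 'rc', 'final')
    # main = X.Y when the patch level is 0, else X.Y.Z
    parts = 2 if version[2] == 0 else 3
    main = '.'.join(str(x) for x in version[:parts])
    sub = ''
    if version[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[version[3]] + str(version[4])
    return main + sub

print(build_version((1, 0, 0, 'final', 0)))  # 1.0
print(build_version((0, 9, 0, 'beta', 1)))   # 0.9b1
print(build_version((1, 2, 3, 'rc', 2)))     # 1.2.3c2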
0683e4fb0431563758d93b39d102d1c634a4535b
run.py
run.py
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subjects') ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subject') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
Change the subject url from /quip/subjects to /quip/subject.
Change the subject url from /quip/subjects to /quip/subject.
Python
bsd-2-clause
ohsu-qin/qiprofile-rest,ohsu-qin/qirest
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. - ext.add_model(models.Subject, url='subjects') + ext.add_model(models.Subject, url='subject') - ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
Change the subject url from /quip/subjects to /quip/subject.
## Code Before: import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subjects') ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run() ## Instruction: Change the subject url from /quip/subjects to /quip/subject. ## Code After: import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. ext.add_model(models.Subject, url='subject') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
import importlib import mongoengine from eve import Eve from eve_mongoengine import EveMongoengine from qiprofile_rest import models # The application. app = Eve() # The MongoEngine ORM extension. ext = EveMongoengine(app) # Register the model non-embedded documdent classes. - ext.add_model(models.Subject, url='subjects') ? - + ext.add_model(models.Subject, url='subject') - ext.add_model(models.SubjectDetail, url='subject-detail') ext.add_model(models.SessionDetail, url='session-detail') if __name__ == '__main__': app.run()
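A usage sketch of the renamed endpoint, assuming the Eve app above is running locally on its default port (URLs and status codes are illustrative):

import requests  # third-party HTTP client, assumed installed

# After the change the resource is exposed in the singular:
print(requests.get("http://localhost:5000/subject").status_code)   # 200

# The old plural endpoint is gone:
print(requests.get("http://localhost:5000/subjects").status_code)  # 404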
5a764e0b91db628efd20d63d70c5ed688695f8b1
app/routes.py
app/routes.py
from app import app from flask import redirect, render_template @app.route('/') def index(): return render_template('index.html') # default 'catch all' route @app.route('/', defaults={'path': ''}) @app.route('/<path:path>') def catch_all(path): return redirect('/')
from app import app from app.models import Digit from flask import redirect, render_template, request, jsonify @app.route('/') def index(): return render_template('index.html') # api route # parameters # # id: id to query, will return all otherwise # select: one value per item in the query # limit: limit, obviously. @app.route('/api') def api(): query_id = request.args.get('id') # get first id in query string query_limit = request.args.get('limit') # get first limit in query string query_select = request.args.getlist('select') # get all select params if query_id is not None: result = Digit.query.\ filter(Digit.id == query_id).\ all() else: result = Digit.query.limit(query_limit).all() return jsonify(result=[r.as_dict(query_select) for r in result]) # default 'catch all' route @app.route('/', defaults={'path': ''}) @app.route('/<path:path>') def catch_all(path): return redirect('/')
Add basic functional DB /api route
Add basic functional DB /api route
Python
mit
starcalibre/MNIST3D,starcalibre/MNIST3D,starcalibre/MNIST3D
from app import app
+ from app.models import Digit
- from flask import redirect, render_template
+ from flask import redirect, render_template, request, jsonify


@app.route('/')
def index():
    return render_template('index.html')

+
+ # api route
+ # parameters
+ #
+ # id: id to query, will return all otherwise
+ # select: one value per item in the query
+ # limit: limit, obviously.
+ @app.route('/api')
+ def api():
+     query_id = request.args.get('id') # get first id in query string
+     query_limit = request.args.get('limit') # get first limit in query string
+     query_select = request.args.getlist('select') # get all select params
+
+     if query_id is not None:
+         result = Digit.query.\
+             filter(Digit.id == query_id).\
+             all()
+     else:
+         result = Digit.query.limit(query_limit).all()
+     return jsonify(result=[r.as_dict(query_select) for r in result])


# default 'catch all' route
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    return redirect('/')
Add basic functional DB /api route
## Code Before:
from app import app
from flask import redirect, render_template


@app.route('/')
def index():
    return render_template('index.html')


# default 'catch all' route
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    return redirect('/')

## Instruction:
Add basic functional DB /api route

## Code After:
from app import app
from app.models import Digit
from flask import redirect, render_template, request, jsonify


@app.route('/')
def index():
    return render_template('index.html')


# api route
# parameters
#
# id: id to query, will return all otherwise
# select: one value per item in the query
# limit: limit, obviously.
@app.route('/api')
def api():
    query_id = request.args.get('id') # get first id in query string
    query_limit = request.args.get('limit') # get first limit in query string
    query_select = request.args.getlist('select') # get all select params

    if query_id is not None:
        result = Digit.query.\
            filter(Digit.id == query_id).\
            all()
    else:
        result = Digit.query.limit(query_limit).all()
    return jsonify(result=[r.as_dict(query_select) for r in result])


# default 'catch all' route
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    return redirect('/')
from app import app
+ from app.models import Digit
- from flask import redirect, render_template
+ from flask import redirect, render_template, request, jsonify
?                                             ++++++++++++++++++


@app.route('/')
def index():
    return render_template('index.html')

+
+ # api route
+ # parameters
+ #
+ # id: id to query, will return all otherwise
+ # select: one value per item in the query
+ # limit: limit, obviously.
+ @app.route('/api')
+ def api():
+     query_id = request.args.get('id') # get first id in query string
+     query_limit = request.args.get('limit') # get first limit in query string
+     query_select = request.args.getlist('select') # get all select params
+
+     if query_id is not None:
+         result = Digit.query.\
+             filter(Digit.id == query_id).\
+             all()
+     else:
+         result = Digit.query.limit(query_limit).all()
+     return jsonify(result=[r.as_dict(query_select) for r in result])


# default 'catch all' route
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    return redirect('/')
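A hedged usage sketch for the /api route introduced above. Only the id, select, and limit parameters come from the commit; the host, port, and the column names passed to select are hypothetical.

import requests

BASE = 'http://127.0.0.1:5000'  # assumed development host/port

# Query one digit by id, restricting the returned columns; 'label' and
# 'x' are made-up column names on the Digit model.
single = requests.get(BASE + '/api',
                      params={'id': 7, 'select': ['label', 'x']})
print(single.json()['result'])

# Query the first ten digits with every column.
first_ten = requests.get(BASE + '/api', params={'limit': 10})
print(len(first_ten.json()['result']))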
8fad8a4f1591fb4a7d7d1bdf932c5918197b475c
tests/client.py
tests/client.py
from htmltree import *

def start():
    console.log("Starting")
    newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green'))
    console.log(newcontent.render(0))
    document.body.innerHTML = newcontent.render()
    console.log("Finished")

document.addEventListener('DOMContentLoaded', start)
from htmltree import *

def start():
    console.log("Starting")
    ## insert a style element at the end of the <head?
    cssrules = {'.test':{'color':'green', 'text-align':'center'}}
    style = Style(**cssrules)
    document.head.insertAdjacentHTML('beforeend', style.render())

    ## Replace the <body> content
    newcontent = Div(H1("Sanity check PASS", _class='test'))
    document.body.innerHTML = newcontent.render()
    console.log("Finished")

## JS is event driven.
## Wait for DOM load to complete before firing
## our start() function.
document.addEventListener('DOMContentLoaded', start)
Fix <style> rendering under Transcrypt.
Fix <style> rendering under Transcrypt. The hasattr test in renderCss() was failing when it shouldn't have. Fixed by removal. Updated tests/client.py to create and append a style element to detect problems related to Style() on the client side.
Python
mit
Michael-F-Ellis/htmltree
from htmltree import *

def start():
    console.log("Starting")
+     ## insert a style element at the end of the <head?
+     cssrules = {'.test':{'color':'green', 'text-align':'center'}}
+     style = Style(**cssrules)
+     document.head.insertAdjacentHTML('beforeend', style.render())
+
+     ## Replace the <body> content
-     newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green'))
+     newcontent = Div(H1("Sanity check PASS", _class='test'))
-     console.log(newcontent.render(0))
    document.body.innerHTML = newcontent.render()
    console.log("Finished")
+
+ ## JS is event driven.
+ ## Wait for DOM load to complete before firing
+ ## our start() function.
document.addEventListener('DOMContentLoaded', start)
Fix <style> rendering under Transcrypt.
## Code Before:
from htmltree import *

def start():
    console.log("Starting")
    newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green'))
    console.log(newcontent.render(0))
    document.body.innerHTML = newcontent.render()
    console.log("Finished")

document.addEventListener('DOMContentLoaded', start)

## Instruction:
Fix <style> rendering under Transcrypt.

## Code After:
from htmltree import *

def start():
    console.log("Starting")
    ## insert a style element at the end of the <head?
    cssrules = {'.test':{'color':'green', 'text-align':'center'}}
    style = Style(**cssrules)
    document.head.insertAdjacentHTML('beforeend', style.render())

    ## Replace the <body> content
    newcontent = Div(H1("Sanity check PASS", _class='test'))
    document.body.innerHTML = newcontent.render()
    console.log("Finished")

## JS is event driven.
## Wait for DOM load to complete before firing
## our start() function.
document.addEventListener('DOMContentLoaded', start)
from htmltree import *

def start():
    console.log("Starting")
+     ## insert a style element at the end of the <head?
+     cssrules = {'.test':{'color':'green', 'text-align':'center'}}
+     style = Style(**cssrules)
+     document.head.insertAdjacentHTML('beforeend', style.render())
+
+     ## Replace the <body> content
-     newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green'))
?                                                       --------------------------
+     newcontent = Div(H1("Sanity check PASS", _class='test'))
?                  ++++
-     console.log(newcontent.render(0))
    document.body.innerHTML = newcontent.render()
    console.log("Finished")
+
+ ## JS is event driven.
+ ## Wait for DOM load to complete before firing
+ ## our start() function.
document.addEventListener('DOMContentLoaded', start)
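A small CPython-side sketch of the Style() call exercised by the test above, useful for checking the renderCss() fix outside the browser. It mirrors the cssrules dict from tests/client.py; the exact whitespace of the rendered output is an assumption.

from htmltree import *

# Same rules dict as in the record's client test.
cssrules = {'.test': {'color': 'green', 'text-align': 'center'}}
style = Style(**cssrules)
# With the hasattr check removed, this should render cleanly,
# emitting roughly: <style>.test { color:green; text-align:center; }</style>
print(style.render())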
07ea0d8ec5c65f0fc94dc29f8b03402c571d3a42
qipipe/interfaces/fix_dicom.py
qipipe/interfaces/fix_dicom.py
import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
    traits, InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers


class FixDicomInputSpec(BaseInterfaceInputSpec):
    collection = traits.Str(desc='The image collection', mandatory=True)
    subject = traits.Str(desc='The subject name', mandatory=True)
    in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True)


class FixDicomOutputSpec(TraitedSpec):
    out_files = traits.List(desc="The modified output files", trait=File, exists=True)


class FixDicom(BaseInterface):
    """The FixDicom interface wraps the
    :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function."""

    input_spec = FixDicomInputSpec

    output_spec = FixDicomOutputSpec

    def _run_interface(self, runtime):
        self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['out_files'] = self._out_files
        return outputs
import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
    traits, InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers


class FixDicomInputSpec(BaseInterfaceInputSpec):
    collection = traits.Str(desc='The image collection', mandatory=True)
    subject = traits.Str(desc='The subject name', mandatory=True)
    in_file = File(exists=True, desc='The input DICOM file', mandatory=True)


class FixDicomOutputSpec(TraitedSpec):
    out_file = File(desc="The modified output file", exists=True)


class FixDicom(BaseInterface):
    """The FixDicom interface wraps the
    :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function."""

    input_spec = FixDicomInputSpec

    output_spec = FixDicomOutputSpec

    def _run_interface(self, runtime):
        self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['out_file'] = self._out_file
        return outputs
Fix only one file at a time.
Fix only one file at a time.
Python
bsd-2-clause
ohsu-qin/qipipe
import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
    traits, InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers


class FixDicomInputSpec(BaseInterfaceInputSpec):
    collection = traits.Str(desc='The image collection', mandatory=True)
    subject = traits.Str(desc='The subject name', mandatory=True)
-     in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True)
+     in_file = File(exists=True, desc='The input DICOM file', mandatory=True)


class FixDicomOutputSpec(TraitedSpec):
-     out_files = traits.List(desc="The modified output files", trait=File, exists=True)
+     out_file = File(desc="The modified output file", exists=True)


class FixDicom(BaseInterface):
    """The FixDicom interface wraps the
    :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function."""

    input_spec = FixDicomInputSpec

    output_spec = FixDicomOutputSpec

    def _run_interface(self, runtime):
-         self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files)
+         self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
-         outputs['out_files'] = self._out_files
+         outputs['out_file'] = self._out_file
        return outputs
Fix only one file at a time.
## Code Before:
import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
    traits, InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers


class FixDicomInputSpec(BaseInterfaceInputSpec):
    collection = traits.Str(desc='The image collection', mandatory=True)
    subject = traits.Str(desc='The subject name', mandatory=True)
    in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True)


class FixDicomOutputSpec(TraitedSpec):
    out_files = traits.List(desc="The modified output files", trait=File, exists=True)


class FixDicom(BaseInterface):
    """The FixDicom interface wraps the
    :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function."""

    input_spec = FixDicomInputSpec

    output_spec = FixDicomOutputSpec

    def _run_interface(self, runtime):
        self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['out_files'] = self._out_files
        return outputs

## Instruction:
Fix only one file at a time.

## Code After:
import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
    traits, InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers


class FixDicomInputSpec(BaseInterfaceInputSpec):
    collection = traits.Str(desc='The image collection', mandatory=True)
    subject = traits.Str(desc='The subject name', mandatory=True)
    in_file = File(exists=True, desc='The input DICOM file', mandatory=True)


class FixDicomOutputSpec(TraitedSpec):
    out_file = File(desc="The modified output file", exists=True)


class FixDicom(BaseInterface):
    """The FixDicom interface wraps the
    :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function."""

    input_spec = FixDicomInputSpec

    output_spec = FixDicomOutputSpec

    def _run_interface(self, runtime):
        self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['out_file'] = self._out_file
        return outputs
import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec,
    traits, InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers


class FixDicomInputSpec(BaseInterfaceInputSpec):
    collection = traits.Str(desc='The image collection', mandatory=True)
    subject = traits.Str(desc='The subject name', mandatory=True)
-     in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True)
?             -   ---------------                 -                            -
+     in_file = File(exists=True, desc='The input DICOM file', mandatory=True)


class FixDicomOutputSpec(TraitedSpec):
-     out_files = traits.List(desc="The modified output files", trait=File, exists=True)
?             -    ^^^ ^^^^^^^                               -  ------------
+     out_file = File(desc="The modified output file", exists=True)
?                ^ ^^
class FixDicom(BaseInterface):
    """The FixDicom interface wraps the
    :meth:`qipipe.staging.fix_dicom.fix_dicom_headers` function."""

    input_spec = FixDicomInputSpec

    output_spec = FixDicomOutputSpec

    def _run_interface(self, runtime):
-         self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files)
?                       -                                                                  -                    -
+         self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file)
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
-         outputs['out_files'] = self._out_files
?                          -                   -
+         outputs['out_file'] = self._out_file
        return outputs
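A hedged sketch of invoking the single-file interface after this change, for example standalone or iterated from a nipype MapNode. The collection name, subject name, and file path below are placeholders, not values from the commit.

from qipipe.interfaces.fix_dicom import FixDicom

# One input file per run after this commit; loop (or use a nipype
# MapNode) to process a whole series.
fix = FixDicom(collection='Breast', subject='Breast001',
               in_file='/path/to/image.dcm')
result = fix.run()
# Standard nipype result object: the fixed file path is on outputs.
print(result.outputs.out_file)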
aea05ee76193ac0abe2f6673910917bf13a3b339
setup.py
setup.py
from distutils.core import setup

setup(
    name='simplecrypto',
    version=open('CHANGES.txt').read().split()[0],
    author='Lucas Boppre Niehues',
    author_email='[email protected]',
    packages=['simplecrypto'],
    url='http://pypi.python.org/pypi/simplecrypto/',
    license='LICENSE.txt',
    description='simplecrypto',
    long_description=open('README.md').read(),
    install_requires=[
        'PyCrypto',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Security :: Cryptography',
        'License :: OSI Approved :: MIT License',
    ],
)
from distutils.core import setup

setup(
    name='simplecrypto',
    version=open('CHANGES.txt').read().split()[0],
    author='Lucas Boppre Niehues',
    author_email='[email protected]',
    packages=['simplecrypto'],
    url='https://github.com/boppreh/simplecrypto',
    license='LICENSE.txt',
    description='simplecrypto',
    long_description=open('README.md').read(),
    install_requires=[
        'PyCrypto',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Security :: Cryptography',
        'License :: OSI Approved :: MIT License',
    ],
)
Change homepage to github URL
Change homepage to github URL
Python
mit
boppreh/simplecrypto
from distutils.core import setup

setup(
    name='simplecrypto',
    version=open('CHANGES.txt').read().split()[0],
    author='Lucas Boppre Niehues',
    author_email='[email protected]',
    packages=['simplecrypto'],
-     url='http://pypi.python.org/pypi/simplecrypto/',
+     url='https://github.com/boppreh/simplecrypto',
    license='LICENSE.txt',
    description='simplecrypto',
    long_description=open('README.md').read(),
    install_requires=[
        'PyCrypto',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Security :: Cryptography',
        'License :: OSI Approved :: MIT License',
    ],
)
Change homepage to github URL
## Code Before:
from distutils.core import setup

setup(
    name='simplecrypto',
    version=open('CHANGES.txt').read().split()[0],
    author='Lucas Boppre Niehues',
    author_email='[email protected]',
    packages=['simplecrypto'],
    url='http://pypi.python.org/pypi/simplecrypto/',
    license='LICENSE.txt',
    description='simplecrypto',
    long_description=open('README.md').read(),
    install_requires=[
        'PyCrypto',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Security :: Cryptography',
        'License :: OSI Approved :: MIT License',
    ],
)

## Instruction:
Change homepage to github URL

## Code After:
from distutils.core import setup

setup(
    name='simplecrypto',
    version=open('CHANGES.txt').read().split()[0],
    author='Lucas Boppre Niehues',
    author_email='[email protected]',
    packages=['simplecrypto'],
    url='https://github.com/boppreh/simplecrypto',
    license='LICENSE.txt',
    description='simplecrypto',
    long_description=open('README.md').read(),
    install_requires=[
        'PyCrypto',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Security :: Cryptography',
        'License :: OSI Approved :: MIT License',
    ],
)
from distutils.core import setup

setup(
    name='simplecrypto',
    version=open('CHANGES.txt').read().split()[0],
    author='Lucas Boppre Niehues',
    author_email='[email protected]',
    packages=['simplecrypto'],
-     url='http://pypi.python.org/pypi/simplecrypto/',
+     url='https://github.com/boppreh/simplecrypto',
    license='LICENSE.txt',
    description='simplecrypto',
    long_description=open('README.md').read(),
    install_requires=[
        'PyCrypto',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: Security :: Cryptography',
        'License :: OSI Approved :: MIT License',
    ],
)
a65eb4af0c35c8e79d44efa6acb546e19008a8ee
elmo/moon_tracker/forms.py
elmo/moon_tracker/forms.py
from django import forms

import csv
from io import StringIO


class BatchMoonScanForm(forms.Form):
    data = forms.CharField(
        widget=forms.Textarea(attrs={'class':'form-control monospace'}),
    )

    def clean(self):
        cleaned_data = super(BatchMoonScanForm, self).clean()

        raw = StringIO(cleaned_data['data'])
        reader = csv.reader(raw, delimiter='\t')
        next(reader)

        res = []

        for x in reader:
            print(x)
            if len(x) == 1:
                assert(len(x[0]) > 0)

                current_moon = 0
                current_scan = {}
                res.append(current_scan)
            else:
                assert(len(x[0]) == 0)

                moon_id = int(x[6])
                ore_id = int(x[3])
                percentage = int(round(100 * float(x[2])))

                if current_moon == 0:
                    current_moon = moon_id
                else:
                    assert(moon_id == current_moon)

                assert(ore_id not in current_scan)

                current_scan[ore_id] = percentage

        print(res)

        cleaned_data['data'] = res
from django import forms

import csv
from io import StringIO


class BatchMoonScanForm(forms.Form):
    data = forms.CharField(
        widget=forms.Textarea(attrs={'class':'form-control monospace'}),
    )

    def clean(self):
        cleaned_data = super(BatchMoonScanForm, self).clean()

        raw = StringIO(cleaned_data['data'])
        reader = csv.reader(raw, delimiter='\t')
        next(reader)

        res = {}
        current_moon = 0
        percentage_sum = 0
        current_scan = {}

        for x in reader:
            print(x)
            if len(x) == 1:
                if len(x[0]) == 0:
                    raise forms.ValidationError('Invalid input format.')

                if current_moon != 0 and percentage_sum != 100:
                    raise forms.ValidationError('Sum of percentages must be 100.')

                if len(current_scan) > 0 and current_moon != 0:
                    res[current_moon] = current_scan

                current_moon = 0
                percentage_sum = 0
                current_scan = {}
            else:
                if len(x[0]) != 0:
                    raise forms.ValidationError('Invalid input format.')

                moon_id = int(x[6])
                ore_id = int(x[3])
                percentage = int(round(100 * float(x[2])))

                percentage_sum += percentage

                if current_moon == 0:
                    current_moon = moon_id
                elif moon_id != current_moon:
                    raise forms.ValidationError('Unexpected moon ID.')

                if ore_id in current_scan:
                    raise forms.ValidationError('Unexpected moon ID.')

                current_scan[ore_id] = percentage

        print(res)

        cleaned_data['data'] = res
Improve batch form return data structure.
Improve batch form return data structure.
Python
mit
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
from django import forms

import csv
from io import StringIO


class BatchMoonScanForm(forms.Form):
    data = forms.CharField(
        widget=forms.Textarea(attrs={'class':'form-control monospace'}),
    )

    def clean(self):
        cleaned_data = super(BatchMoonScanForm, self).clean()

        raw = StringIO(cleaned_data['data'])
        reader = csv.reader(raw, delimiter='\t')
        next(reader)

-         res = []
+         res = {}
+         current_moon = 0
+         percentage_sum = 0
+         current_scan = {}

        for x in reader:
            print(x)
            if len(x) == 1:
-                 assert(len(x[0]) > 0)
+                 if len(x[0]) == 0:
+                     raise forms.ValidationError('Invalid input format.')
+
+                 if current_moon != 0 and percentage_sum != 100:
+                     raise forms.ValidationError('Sum of percentages must be 100.')
+
+                 if len(current_scan) > 0 and current_moon != 0:
+                     res[current_moon] = current_scan

                current_moon = 0
+                 percentage_sum = 0
                current_scan = {}
-                 res.append(current_scan)
            else:
-                 assert(len(x[0]) == 0)
+                 if len(x[0]) != 0:
+                     raise forms.ValidationError('Invalid input format.')

                moon_id = int(x[6])
                ore_id = int(x[3])
                percentage = int(round(100 * float(x[2])))

+                 percentage_sum += percentage
+
                if current_moon == 0:
                    current_moon = moon_id
-                 else:
-                     assert(moon_id == current_moon)
+                 elif moon_id != current_moon:
+                     raise forms.ValidationError('Unexpected moon ID.')

-                 assert(ore_id not in current_scan)
+                 if ore_id in current_scan:
+                     raise forms.ValidationError('Unexpected moon ID.')

                current_scan[ore_id] = percentage

        print(res)

        cleaned_data['data'] = res
Improve batch form return data structure.
## Code Before:
from django import forms

import csv
from io import StringIO


class BatchMoonScanForm(forms.Form):
    data = forms.CharField(
        widget=forms.Textarea(attrs={'class':'form-control monospace'}),
    )

    def clean(self):
        cleaned_data = super(BatchMoonScanForm, self).clean()

        raw = StringIO(cleaned_data['data'])
        reader = csv.reader(raw, delimiter='\t')
        next(reader)

        res = []

        for x in reader:
            print(x)
            if len(x) == 1:
                assert(len(x[0]) > 0)

                current_moon = 0
                current_scan = {}
                res.append(current_scan)
            else:
                assert(len(x[0]) == 0)

                moon_id = int(x[6])
                ore_id = int(x[3])
                percentage = int(round(100 * float(x[2])))

                if current_moon == 0:
                    current_moon = moon_id
                else:
                    assert(moon_id == current_moon)

                assert(ore_id not in current_scan)

                current_scan[ore_id] = percentage

        print(res)

        cleaned_data['data'] = res

## Instruction:
Improve batch form return data structure.

## Code After:
from django import forms

import csv
from io import StringIO


class BatchMoonScanForm(forms.Form):
    data = forms.CharField(
        widget=forms.Textarea(attrs={'class':'form-control monospace'}),
    )

    def clean(self):
        cleaned_data = super(BatchMoonScanForm, self).clean()

        raw = StringIO(cleaned_data['data'])
        reader = csv.reader(raw, delimiter='\t')
        next(reader)

        res = {}
        current_moon = 0
        percentage_sum = 0
        current_scan = {}

        for x in reader:
            print(x)
            if len(x) == 1:
                if len(x[0]) == 0:
                    raise forms.ValidationError('Invalid input format.')

                if current_moon != 0 and percentage_sum != 100:
                    raise forms.ValidationError('Sum of percentages must be 100.')

                if len(current_scan) > 0 and current_moon != 0:
                    res[current_moon] = current_scan

                current_moon = 0
                percentage_sum = 0
                current_scan = {}
            else:
                if len(x[0]) != 0:
                    raise forms.ValidationError('Invalid input format.')

                moon_id = int(x[6])
                ore_id = int(x[3])
                percentage = int(round(100 * float(x[2])))

                percentage_sum += percentage

                if current_moon == 0:
                    current_moon = moon_id
                elif moon_id != current_moon:
                    raise forms.ValidationError('Unexpected moon ID.')

                if ore_id in current_scan:
                    raise forms.ValidationError('Unexpected moon ID.')

                current_scan[ore_id] = percentage

        print(res)

        cleaned_data['data'] = res
from django import forms

import csv
from io import StringIO


class BatchMoonScanForm(forms.Form):
    data = forms.CharField(
        widget=forms.Textarea(attrs={'class':'form-control monospace'}),
    )

    def clean(self):
        cleaned_data = super(BatchMoonScanForm, self).clean()

        raw = StringIO(cleaned_data['data'])
        reader = csv.reader(raw, delimiter='\t')
        next(reader)

-         res = []
?                ^^
+         res = {}
?                ^^
+         current_moon = 0
+         percentage_sum = 0
+         current_scan = {}

        for x in reader:
            print(x)
            if len(x) == 1:
-                 assert(len(x[0]) > 0)
?                 ^^^^^^^          ^  ^
+                 if len(x[0]) == 0:
?                 ^^^          ^^  ^
+                     raise forms.ValidationError('Invalid input format.')
+
+                 if current_moon != 0 and percentage_sum != 100:
+                     raise forms.ValidationError('Sum of percentages must be 100.')
+
+                 if len(current_scan) > 0 and current_moon != 0:
+                     res[current_moon] = current_scan

                current_moon = 0
+                 percentage_sum = 0
                current_scan = {}
-                 res.append(current_scan)
            else:
-                 assert(len(x[0]) == 0)
?                 ^^^^^^^          ^  ^
+                 if len(x[0]) != 0:
?                 ^^^          ^  ^
+                     raise forms.ValidationError('Invalid input format.')

                moon_id = int(x[6])
                ore_id = int(x[3])
                percentage = int(round(100 * float(x[2])))

+                 percentage_sum += percentage
+
                if current_moon == 0:
                    current_moon = moon_id
-                 else:
-                     assert(moon_id == current_moon)
?                 ----------         ^             ^
+                 elif moon_id != current_moon:
?                 ++++          ^             ^
+                     raise forms.ValidationError('Unexpected moon ID.')

-                 assert(ore_id not in current_scan)
?                 ^^^^^^^       ----               ^
+                 if ore_id in current_scan:
?                 ^^^                      ^
+                     raise forms.ValidationError('Unexpected moon ID.')

                current_scan[ore_id] = percentage

        print(res)

        cleaned_data['data'] = res
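For context, a self-contained sketch of the tab-separated paste the rewritten clean() above expects: a header row (skipped by next(reader)), a one-field moon line, then ore rows whose percentages must sum to 100. Every name, ID, and fraction below is made up for the sketch.

import csv
from io import StringIO

sample = (
    "Moon\tMoon Product\tQuantity\tOre TypeID\tSolarSystemID\tPlanetID\tMoonID\n"
    "Foo IV - Moon 1\n"
    "\tZeolites\t0.6\t45490\t30000001\t40000001\t40000002\n"
    "\tSylvite\t0.4\t45491\t30000001\t40000001\t40000002\n"
)

reader = csv.reader(StringIO(sample), delimiter='\t')
next(reader)  # clean() skips the header row the same way
for row in reader:
    # Moon lines have exactly one field; ore rows lead with an empty
    # field, carry the fraction at row[2], the ore type ID at row[3],
    # and the moon ID at row[6].
    print(len(row), row)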
34c0a728add7715a9420537f57f7c1a69176c57d
tests/serializer/abstract_test.py
tests/serializer/abstract_test.py
import _path
_path.fix()

##
# python standard library
#
from functools import partial
import unittest

##
# pygrapes modules
#
from pygrapes.serializer import Base


class BaseSerializerTestCase(unittest.TestCase):

    def test_method_dumps_exists(self):
        self.assertTrue(hasattr(Base(), 'dumps'))

    def test_method_dumps_expects_one_arg(self):
        self.assertRaises(TypeError, Base().dumps)

    def test_dumps_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Base().dumps, 1))

    def test_method_loads_exists(self):
        self.assertTrue(hasattr(Base(), 'loads'))

    def test_method_loads_expects_one_arg(self):
        self.assertRaises(TypeError, Base().loads)

    def test_loads_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Base().loads, 1))


if "__main__" == __name__:
    unittest.main()
import _path
_path.fix()

##
# python standard library
#
from functools import partial
import unittest

##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract


class AbstractSerializerTestCase(unittest.TestCase):

    def test_method_dumps_exists(self):
        self.assertTrue(hasattr(Abstract(), 'dumps'))

    def test_method_dumps_expects_one_arg(self):
        self.assertRaises(TypeError, Abstract().dumps)

    def test_dumps_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))

    def test_method_loads_exists(self):
        self.assertTrue(hasattr(Abstract(), 'loads'))

    def test_method_loads_expects_one_arg(self):
        self.assertRaises(TypeError, Abstract().loads)

    def test_loads_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))


if "__main__" == __name__:
    unittest.main()
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class
Python
bsd-3-clause
michalbachowski/pygrapes,michalbachowski/pygrapes,michalbachowski/pygrapes
import _path
_path.fix()

##
# python standard library
#
from functools import partial
import unittest

##
# pygrapes modules
#
- from pygrapes.serializer import Base
+ from pygrapes.serializer.abstract import Abstract


- class BaseSerializerTestCase(unittest.TestCase):
+ class AbstractSerializerTestCase(unittest.TestCase):

    def test_method_dumps_exists(self):
-         self.assertTrue(hasattr(Base(), 'dumps'))
+         self.assertTrue(hasattr(Abstract(), 'dumps'))

    def test_method_dumps_expects_one_arg(self):
-         self.assertRaises(TypeError, Base().dumps)
+         self.assertRaises(TypeError, Abstract().dumps)

    def test_dumps_method_must_be_implemented(self):
-         self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
+         self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))

    def test_method_loads_exists(self):
-         self.assertTrue(hasattr(Base(), 'loads'))
+         self.assertTrue(hasattr(Abstract(), 'loads'))

    def test_method_loads_expects_one_arg(self):
-         self.assertRaises(TypeError, Base().loads)
+         self.assertRaises(TypeError, Abstract().loads)

    def test_loads_method_must_be_implemented(self):
-         self.assertRaises(NotImplementedError, partial(Base().loads, 1))
+         self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))


if "__main__" == __name__:
    unittest.main()
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class
## Code Before:
import _path
_path.fix()

##
# python standard library
#
from functools import partial
import unittest

##
# pygrapes modules
#
from pygrapes.serializer import Base


class BaseSerializerTestCase(unittest.TestCase):

    def test_method_dumps_exists(self):
        self.assertTrue(hasattr(Base(), 'dumps'))

    def test_method_dumps_expects_one_arg(self):
        self.assertRaises(TypeError, Base().dumps)

    def test_dumps_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Base().dumps, 1))

    def test_method_loads_exists(self):
        self.assertTrue(hasattr(Base(), 'loads'))

    def test_method_loads_expects_one_arg(self):
        self.assertRaises(TypeError, Base().loads)

    def test_loads_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Base().loads, 1))


if "__main__" == __name__:
    unittest.main()

## Instruction:
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class

## Code After:
import _path
_path.fix()

##
# python standard library
#
from functools import partial
import unittest

##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract


class AbstractSerializerTestCase(unittest.TestCase):

    def test_method_dumps_exists(self):
        self.assertTrue(hasattr(Abstract(), 'dumps'))

    def test_method_dumps_expects_one_arg(self):
        self.assertRaises(TypeError, Abstract().dumps)

    def test_dumps_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))

    def test_method_loads_exists(self):
        self.assertTrue(hasattr(Abstract(), 'loads'))

    def test_method_loads_expects_one_arg(self):
        self.assertRaises(TypeError, Abstract().loads)

    def test_loads_method_must_be_implemented(self):
        self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))


if "__main__" == __name__:
    unittest.main()
import _path
_path.fix()

##
# python standard library
#
from functools import partial
import unittest

##
# pygrapes modules
#
- from pygrapes.serializer import Base
?                                 ^ ^^
+ from pygrapes.serializer.abstract import Abstract
?                         +++++++++        ^^^^^ ^^


- class BaseSerializerTestCase(unittest.TestCase):
?       ^ ^^
+ class AbstractSerializerTestCase(unittest.TestCase):
?       ^^^^^ ^^

    def test_method_dumps_exists(self):
-         self.assertTrue(hasattr(Base(), 'dumps'))
?                                 ^ ^^
+         self.assertTrue(hasattr(Abstract(), 'dumps'))
?                                 ^^^^^ ^^

    def test_method_dumps_expects_one_arg(self):
-         self.assertRaises(TypeError, Base().dumps)
?                                      ^ ^^
+         self.assertRaises(TypeError, Abstract().dumps)
?                                      ^^^^^ ^^

    def test_dumps_method_must_be_implemented(self):
-         self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
?                                                        ^ ^^
+         self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))
?                                                        ^^^^^ ^^

    def test_method_loads_exists(self):
-         self.assertTrue(hasattr(Base(), 'loads'))
?                                 ^ ^^
+         self.assertTrue(hasattr(Abstract(), 'loads'))
?                                 ^^^^^ ^^

    def test_method_loads_expects_one_arg(self):
-         self.assertRaises(TypeError, Base().loads)
?                                      ^ ^^
+         self.assertRaises(TypeError, Abstract().loads)
?                                      ^^^^^ ^^

    def test_loads_method_must_be_implemented(self):
-         self.assertRaises(NotImplementedError, partial(Base().loads, 1))
?                                                        ^ ^^
+         self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))
?                                                        ^^^^^ ^^


if "__main__" == __name__:
    unittest.main()
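To round out the record above: a hedged sketch of the kind of concrete subclass these abstract-class tests imply, using JSON purely as an illustrative format. The JsonSerializer name and its method bodies are hypothetical, not part of pygrapes.

import json

from pygrapes.serializer.abstract import Abstract


class JsonSerializer(Abstract):
    """Hypothetical concrete serializer satisfying the Abstract contract."""

    def dumps(self, data):
        # Overriding dumps() avoids the NotImplementedError the tests
        # expect from the bare Abstract class.
        return json.dumps(data)

    def loads(self, data):
        return json.loads(data)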