| column | type |
|---|---|
| commit | stringlengths 40-40 |
| old_file | stringlengths 4-118 |
| new_file | stringlengths 4-118 |
| old_contents | stringlengths 10-2.94k |
| new_contents | stringlengths 21-3.18k |
| subject | stringlengths 16-444 |
| message | stringlengths 17-2.63k |
| lang | stringclasses 1 value |
| license | stringclasses 13 values |
| repos | stringlengths 5-43k |
| ndiff | stringlengths 52-3.32k |
| instruction | stringlengths 16-444 |
| content | stringlengths 133-4.32k |
| fuzzy_diff | stringlengths 16-3.18k |
1b7767dbc4fbaf69a6bf83a3989d5e672e0c7488
|
django_countries/filters.py
|
django_countries/filters.py
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _


class CountryFilter(admin.SimpleListFilter):
    """
    A country filter for Django admin that only returns a list of countries related to the model.
    """
    title = _('Country')
    parameter_name = 'country'

    def lookups(self, request, model_admin):
        return set([
            (obj.country, obj.country.name) for obj in model_admin.model.objects.exclude(
                country__isnull=True
            ).exclude(country__exact='')
        ])

    def queryset(self, request, queryset):
        if self.value():
            return queryset.filter(country=self.value())
        else:
            return queryset
|
from django.contrib import admin
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _


class CountryFilter(admin.FieldListFilter):
    """
    A country filter for Django admin that only returns a list of countries
    related to the model.
    """
    title = _('Country')

    def expected_parameters(self):
        return [self.field.name]

    def choices(self, changelist):
        yield {
            'selected': self.value() is None,
            'query_string': changelist.get_query_string(
                {}, [self.field.name]),
            'display': _('All'),
        }
        for lookup, title in self.lookup_choices(changelist):
            yield {
                'selected': self.value() == force_text(lookup),
                'query_string': changelist.get_query_string(
                    {self.field.name: lookup}, []),
                'display': title,
            }

    def lookup_choices(self, changelist):
        codes = changelist.queryset.values_list(self.field.name, flat=True)
        for k, v in self.field.get_choices(include_blank=False):
            if k in codes:
                yield k, v
|
Change the admin filter to a FieldListFilter
|
Change the admin filter to a FieldListFilter
|
Python
|
mit
|
schinckel/django-countries,SmileyChris/django-countries,pimlie/django-countries
|
  from django.contrib import admin
+ from django.utils.encoding import force_text
- from django.utils.translation import ugettext_lazy as _
+ from django.utils.translation import ugettext as _
+ 
- class CountryFilter(admin.SimpleListFilter):
+ class CountryFilter(admin.FieldListFilter):
      """
-     A country filter for Django admin that only returns a list of countries related to the model.
+     A country filter for Django admin that only returns a list of countries
+     related to the model.
      """
      title = _('Country')
-     parameter_name = 'country'
+     def expected_parameters(self):
+         return [self.field.name]
-     def lookups(self, request, model_admin):
-         return set([
-             (obj.country, obj.country.name) for obj in model_admin.model.objects.exclude(
-                 country__isnull=True
-             ).exclude(country__exact='')
-         ])
-     def queryset(self, request, queryset):
-         if self.value():
-             return queryset.filter(country=self.value())
-         else:
-             return queryset
+     def choices(self, changelist):
+         yield {
+             'selected': self.value() is None,
+             'query_string': changelist.get_query_string(
+                 {}, [self.field.name]),
+             'display': _('All'),
+         }
+         for lookup, title in self.lookup_choices(changelist):
+             yield {
+                 'selected': self.value() == force_text(lookup),
+                 'query_string': changelist.get_query_string(
+                     {self.field.name: lookup}, []),
+                 'display': title,
+             }
+     def lookup_choices(self, changelist):
+         codes = changelist.queryset.values_list(self.field.name, flat=True)
+         for k, v in self.field.get_choices(include_blank=False):
+             if k in codes:
+                 yield k, v
|
Change the admin filter to a FieldListFilter
|
## Code Before:
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _


class CountryFilter(admin.SimpleListFilter):
    """
    A country filter for Django admin that only returns a list of countries related to the model.
    """
    title = _('Country')
    parameter_name = 'country'

    def lookups(self, request, model_admin):
        return set([
            (obj.country, obj.country.name) for obj in model_admin.model.objects.exclude(
                country__isnull=True
            ).exclude(country__exact='')
        ])

    def queryset(self, request, queryset):
        if self.value():
            return queryset.filter(country=self.value())
        else:
            return queryset
## Instruction:
Change the admin filter to a FieldListFilter
## Code After:
from django.contrib import admin
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _


class CountryFilter(admin.FieldListFilter):
    """
    A country filter for Django admin that only returns a list of countries
    related to the model.
    """
    title = _('Country')

    def expected_parameters(self):
        return [self.field.name]

    def choices(self, changelist):
        yield {
            'selected': self.value() is None,
            'query_string': changelist.get_query_string(
                {}, [self.field.name]),
            'display': _('All'),
        }
        for lookup, title in self.lookup_choices(changelist):
            yield {
                'selected': self.value() == force_text(lookup),
                'query_string': changelist.get_query_string(
                    {self.field.name: lookup}, []),
                'display': title,
            }

    def lookup_choices(self, changelist):
        codes = changelist.queryset.values_list(self.field.name, flat=True)
        for k, v in self.field.get_choices(include_blank=False):
            if k in codes:
                yield k, v
|
// ... existing code ...
from django.contrib import admin
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _


class CountryFilter(admin.FieldListFilter):
    """
    A country filter for Django admin that only returns a list of countries
    related to the model.
    """
// ... modified code ...
    title = _('Country')

    def expected_parameters(self):
        return [self.field.name]

    def choices(self, changelist):
        yield {
            'selected': self.value() is None,
            'query_string': changelist.get_query_string(
                {}, [self.field.name]),
            'display': _('All'),
        }
        for lookup, title in self.lookup_choices(changelist):
            yield {
                'selected': self.value() == force_text(lookup),
                'query_string': changelist.get_query_string(
                    {self.field.name: lookup}, []),
                'display': title,
            }

    def lookup_choices(self, changelist):
        codes = changelist.queryset.values_list(self.field.name, flat=True)
        for k, v in self.field.get_choices(include_blank=False):
            if k in codes:
                yield k, v
// ... rest of the code ...
|
c0ff6cbf293bca3f0757a62e05a14c56dbdf12a4
|
installscripts/jazz-terraform-unix-noinstances/scripts/health_check.py
|
installscripts/jazz-terraform-unix-noinstances/scripts/health_check.py
|
import boto3
import sys
import time


def health_check_tg(client, tg_arn, max_tries):
    if max_tries == 1:
        return False
    else:
        max_tries -= 1
        try:
            response = client.describe_target_health(TargetGroupArn=str(tg_arn))
            if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
                time.sleep(30)
                return True
            else:
                time.sleep(30)
                health_check_tg(client, tg_arn, max_tries)
        except Exception:
            time.sleep(30)
            health_check_tg(client, tg_arn, max_tries)


if __name__ == u"__main__":
    client = boto3.client('elbv2')
    health_check_tg(client, sys.argv[1], 50)
|
import boto3
import sys
import time


def health_check_tg(client, tg_arn, max_tries):
    if max_tries == 1:
        return False
    else:
        max_tries -= 1
        try:
            response = client.describe_target_health(TargetGroupArn=str(tg_arn))
            if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
                time.sleep(30)
                return True
            else:
                time.sleep(30)
                health_check_tg(client, tg_arn, max_tries)
        except Exception:
            time.sleep(30)
            health_check_tg(client, tg_arn, max_tries)


if __name__ == u"__main__":
    client = boto3.client('elbv2')
    health_check_tg(client, sys.argv[1], 50)
|
Fix travis issue for v1.13.1 release
|
Fix travis issue for v1.13.1 release
|
Python
|
apache-2.0
|
tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer
|
  import boto3
  import sys
  import time
  
  def health_check_tg(client, tg_arn, max_tries):
-     if max_tries == 1:
+     if max_tries == 1:
-         return False
+         return False
+     else:
+         max_tries -= 1
+         try:
+             response = client.describe_target_health(TargetGroupArn=str(tg_arn))
+             if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
+                 time.sleep(30)
+                 return True
              else:
-         max_tries -= 1
-         try:
-             response = client.describe_target_health(TargetGroupArn=str(tg_arn))
-             if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
-                 time.sleep(30)
-                 return True
-             else:
-                 time.sleep(30)
-                 health_check_tg(client, tg_arn, max_tries)
-         except Exception:
                  time.sleep(30)
                  health_check_tg(client, tg_arn, max_tries)
+         except Exception:
+             time.sleep(30)
+             health_check_tg(client, tg_arn, max_tries)
  
  if __name__ == u"__main__":
      client = boto3.client('elbv2')
      health_check_tg(client, sys.argv[1], 50)
|
Fix travis issue for v1.13.1 release
|
## Code Before:
import boto3
import sys
import time


def health_check_tg(client, tg_arn, max_tries):
    if max_tries == 1:
        return False
    else:
        max_tries -= 1
        try:
            response = client.describe_target_health(TargetGroupArn=str(tg_arn))
            if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
                time.sleep(30)
                return True
            else:
                time.sleep(30)
                health_check_tg(client, tg_arn, max_tries)
        except Exception:
            time.sleep(30)
            health_check_tg(client, tg_arn, max_tries)


if __name__ == u"__main__":
    client = boto3.client('elbv2')
    health_check_tg(client, sys.argv[1], 50)
## Instruction:
Fix travis issue for v1.13.1 release
## Code After:
import boto3
import sys
import time


def health_check_tg(client, tg_arn, max_tries):
    if max_tries == 1:
        return False
    else:
        max_tries -= 1
        try:
            response = client.describe_target_health(TargetGroupArn=str(tg_arn))
            if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
                time.sleep(30)
                return True
            else:
                time.sleep(30)
                health_check_tg(client, tg_arn, max_tries)
        except Exception:
            time.sleep(30)
            health_check_tg(client, tg_arn, max_tries)


if __name__ == u"__main__":
    client = boto3.client('elbv2')
    health_check_tg(client, sys.argv[1], 50)
|
# ... existing code ...
def health_check_tg(client, tg_arn, max_tries):
    if max_tries == 1:
        return False
    else:
        max_tries -= 1
        try:
            response = client.describe_target_health(TargetGroupArn=str(tg_arn))
            if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
                time.sleep(30)
                return True
            else:
                time.sleep(30)
# ... modified code ...
                health_check_tg(client, tg_arn, max_tries)
        except Exception:
            time.sleep(30)
            health_check_tg(client, tg_arn, max_tries)
# ... rest of the code ...
|
c0c73dd73f13e8d1d677cc2d7cad5c2f63217751
|
python/tests/test_rmm.py
|
python/tests/test_rmm.py
|
import pytest
import functools
from itertools import product

import numpy as np
from numba import cuda

from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm

from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits

_dtypes = [np.int32]
_nelems = [128]


@pytest.fixture(scope="module")
def rmm():
    print("initialize librmm")
    assert librmm.initialize() == librmm.RMM_SUCCESS
    yield librmm
    print("finalize librmm")
    assert librmm.finalize() == librmm.RMM_SUCCESS


@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):

    expect_fn = np.add
    test_fn = libgdf.gdf_add_generic

    #import cffi
    #ffi = cffi.FFI()

    # data
    h_in = gen_rand(dtype, nelem)
    h_result = gen_rand(dtype, nelem)

    d_in = rmm.to_device(h_in)
    d_result = rmm.device_array_like(d_in)

    d_result.copy_to_device(d_in)
    h_result = d_result.copy_to_host()

    print('expect')
    print(h_in)
    print('got')
    print(h_result)

    np.testing.assert_array_equal(h_result, h_in)

    assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
    assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
|
import pytest
import functools
from itertools import product

import numpy as np
from numba import cuda

from librmm_cffi import librmm as rmm

from .utils import gen_rand

_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]


@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
    # data
    h_in = gen_rand(dtype, nelem)
    h_result = gen_rand(dtype, nelem)

    d_in = rmm.to_device(h_in)
    d_result = rmm.device_array_like(d_in)

    d_result.copy_to_device(d_in)
    h_result = d_result.copy_to_host()

    print('expect')
    print(h_in)
    print('got')
    print(h_result)

    np.testing.assert_array_equal(h_result, h_in)
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
Python
|
apache-2.0
|
gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf
|
  import pytest
  import functools
  from itertools import product
  
  import numpy as np
  from numba import cuda
  
- from libgdf_cffi import libgdf
- from librmm_cffi import ffi, librmm
+ from librmm_cffi import librmm as rmm
+ from .utils import gen_rand
- from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
- from .utils import buffer_as_bits
- 
  _dtypes = [np.int32]
+ _nelems = [1, 2, 7, 8, 9, 32, 128]
- _nelems = [128]
- 
- @pytest.fixture(scope="module")
- def rmm():
-     print("initialize librmm")
-     assert librmm.initialize() == librmm.RMM_SUCCESS
-     yield librmm
-     print("finalize librmm")
-     assert librmm.finalize() == librmm.RMM_SUCCESS
- 
  @pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
- def test_rmm_alloc(dtype, nelem, rmm):
+ def test_rmm_alloc(dtype, nelem):
- 
-     expect_fn = np.add
-     test_fn = libgdf.gdf_add_generic
- 
-     #import cffi
-     #ffi = cffi.FFI()
- 
      # data
      h_in = gen_rand(dtype, nelem)
      h_result = gen_rand(dtype, nelem)
  
      d_in = rmm.to_device(h_in)
      d_result = rmm.device_array_like(d_in)
  
      d_result.copy_to_device(d_in)
      h_result = d_result.copy_to_host()
  
      print('expect')
      print(h_in)
      print('got')
      print(h_result)
  
      np.testing.assert_array_equal(h_result, h_in)
- 
-     assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
-     assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
- 
|
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
|
## Code Before:
import pytest
import functools
from itertools import product

import numpy as np
from numba import cuda

from libgdf_cffi import libgdf
from librmm_cffi import ffi, librmm

from .utils import new_column, unwrap_devary, get_dtype, gen_rand, fix_zeros
from .utils import buffer_as_bits

_dtypes = [np.int32]
_nelems = [128]


@pytest.fixture(scope="module")
def rmm():
    print("initialize librmm")
    assert librmm.initialize() == librmm.RMM_SUCCESS
    yield librmm
    print("finalize librmm")
    assert librmm.finalize() == librmm.RMM_SUCCESS


@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem, rmm):

    expect_fn = np.add
    test_fn = libgdf.gdf_add_generic

    #import cffi
    #ffi = cffi.FFI()

    # data
    h_in = gen_rand(dtype, nelem)
    h_result = gen_rand(dtype, nelem)

    d_in = rmm.to_device(h_in)
    d_result = rmm.device_array_like(d_in)

    d_result.copy_to_device(d_in)
    h_result = d_result.copy_to_host()

    print('expect')
    print(h_in)
    print('got')
    print(h_result)

    np.testing.assert_array_equal(h_result, h_in)

    assert rmm.free_device_array_memory(d_in) == rmm.RMM_SUCCESS
    assert rmm.free_device_array_memory(d_result) == rmm.RMM_SUCCESS
## Instruction:
Improve librmm python API and convert all pytests to use RMM to create device_arrays.
## Code After:
import pytest
import functools
from itertools import product

import numpy as np
from numba import cuda

from librmm_cffi import librmm as rmm

from .utils import gen_rand

_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]


@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
    # data
    h_in = gen_rand(dtype, nelem)
    h_result = gen_rand(dtype, nelem)

    d_in = rmm.to_device(h_in)
    d_result = rmm.device_array_like(d_in)

    d_result.copy_to_device(d_in)
    h_result = d_result.copy_to_host()

    print('expect')
    print(h_in)
    print('got')
    print(h_result)

    np.testing.assert_array_equal(h_result, h_in)
|
// ... existing code ...
from librmm_cffi import librmm as rmm
from .utils import gen_rand
// ... modified code ...
_dtypes = [np.int32]
_nelems = [1, 2, 7, 8, 9, 32, 128]
...
@pytest.mark.parametrize('dtype,nelem', list(product(_dtypes, _nelems)))
def test_rmm_alloc(dtype, nelem):
    # data
...
    np.testing.assert_array_equal(h_result, h_in)
// ... rest of the code ...
|
8441acfd5071e8b63fde816f67e167997045d510
|
Lib/misc/setup.py
|
Lib/misc/setup.py
|
import os

from numpy.distutils.misc_util import Configuration


def configuration(parent_package='',top_path=None):
    config = Configuration('misc',parent_package, top_path)
    config.add_data_files('lena.dat')
    print "########", config
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration())
|
import os

from numpy.distutils.misc_util import Configuration


def configuration(parent_package='',top_path=None):
    config = Configuration('misc',parent_package, top_path)
    config.add_data_files('lena.dat')
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration())
|
Remove extra noise on install.
|
Remove extra noise on install.
|
Python
|
bsd-3-clause
|
jseabold/scipy,richardotis/scipy,anntzer/scipy,fredrikw/scipy,behzadnouri/scipy,aman-iitj/scipy,mortada/scipy,njwilson23/scipy,trankmichael/scipy,trankmichael/scipy,apbard/scipy,niknow/scipy,aman-iitj/scipy,behzadnouri/scipy,FRidh/scipy,vanpact/scipy,Eric89GXL/scipy,rmcgibbo/scipy,larsmans/scipy,Shaswat27/scipy,ogrisel/scipy,niknow/scipy,chatcannon/scipy,mhogg/scipy,petebachant/scipy,jor-/scipy,nonhermitian/scipy,mgaitan/scipy,chatcannon/scipy,Eric89GXL/scipy,gdooper/scipy,jonycgn/scipy,FRidh/scipy,fernand/scipy,juliantaylor/scipy,apbard/scipy,vigna/scipy,sargas/scipy,pschella/scipy,mhogg/scipy,chatcannon/scipy,Kamp9/scipy,juliantaylor/scipy,behzadnouri/scipy,surhudm/scipy,ales-erjavec/scipy,kalvdans/scipy,gertingold/scipy,piyush0609/scipy,surhudm/scipy,nonhermitian/scipy,mgaitan/scipy,pbrod/scipy,mortonjt/scipy,rmcgibbo/scipy,matthew-brett/scipy,aeklant/scipy,fredrikw/scipy,lukauskas/scipy,sonnyhu/scipy,raoulbq/scipy,sonnyhu/scipy,njwilson23/scipy,haudren/scipy,pyramania/scipy,zerothi/scipy,arokem/scipy,mikebenfield/scipy,minhlongdo/scipy,grlee77/scipy,gdooper/scipy,bkendzior/scipy,anntzer/scipy,dch312/scipy,grlee77/scipy,fredrikw/scipy,haudren/scipy,zerothi/scipy,witcxc/scipy,mtrbean/scipy,sriki18/scipy,jakevdp/scipy,jjhelmus/scipy,jjhelmus/scipy,Eric89GXL/scipy,woodscn/scipy,sriki18/scipy,pizzathief/scipy,newemailjdm/scipy,ChanderG/scipy,felipebetancur/scipy,Stefan-Endres/scipy,gfyoung/scipy,endolith/scipy,ales-erjavec/scipy,giorgiop/scipy,WarrenWeckesser/scipy,surhudm/scipy,sriki18/scipy,Dapid/scipy,jseabold/scipy,kleskjr/scipy,Newman101/scipy,andyfaff/scipy,aman-iitj/scipy,tylerjereddy/scipy,mikebenfield/scipy,futurulus/scipy,kalvdans/scipy,rgommers/scipy,nvoron23/scipy,pyramania/scipy,kalvdans/scipy,rgommers/scipy,felipebetancur/scipy,Newman101/scipy,ortylp/scipy,dominicelse/scipy,aarchiba/scipy,vhaasteren/scipy,endolith/scipy,nonhermitian/scipy,pizzathief/scipy,surhudm/scipy,Eric89GXL/scipy,anntzer/scipy,kleskjr/scipy,Newman101/scipy,andyfaff/scipy,aman-iitj/scipy,josephcslater/scipy,vanpact/scipy,matthew-brett/scipy,teoliphant/scipy,matthewalbani/scipy,newemailjdm/scipy,woodscn/scipy,sonnyhu/scipy,aeklant/scipy,anielsen001/scipy,andyfaff/scipy,cpaulik/scipy,perimosocordiae/scipy,pizzathief/scipy,njwilson23/scipy,e-q/scipy,trankmichael/scipy,lhilt/scipy,tylerjereddy/scipy,teoliphant/scipy,vigna/scipy,mdhaber/scipy,ogrisel/scipy,petebachant/scipy,raoulbq/scipy,felipebetancur/scipy,nmayorov/scipy,njwilson23/scipy,sargas/scipy,larsmans/scipy,jamestwebber/scipy,newemailjdm/scipy,rgommers/scipy,Stefan-Endres/scipy,rmcgibbo/scipy,mgaitan/scipy,anielsen001/scipy,arokem/scipy,jjhelmus/scipy,zaxliu/scipy,nvoron23/scipy,chatcannon/scipy,mikebenfield/scipy,richardotis/scipy,mortonjt/scipy,nonhermitian/scipy,befelix/scipy,mikebenfield/scipy,gertingold/scipy,gdooper/scipy,pnedunuri/scipy,befelix/scipy,scipy/scipy,nonhermitian/scipy,WillieMaddox/scipy,WillieMaddox/scipy,behzadnouri/scipy,haudren/scipy,piyush0609/scipy,dominicelse/scipy,Shaswat27/scipy,richardotis/scipy,WarrenWeckesser/scipy,gfyoung/scipy,mortada/scipy,efiring/scipy,ales-erjavec/scipy,WarrenWeckesser/scipy,mingwpy/scipy,aeklant/scipy,piyush0609/scipy,felipebetancur/scipy,mhogg/scipy,pbrod/scipy,mdhaber/scipy,maniteja123/scipy,anielsen001/scipy,Srisai85/scipy,mdhaber/scipy,pschella/scipy,pnedunuri/scipy,mortada/scipy,mingwpy/scipy,piyush0609/scipy,endolith/scipy,ortylp/scipy,mgaitan/scipy,surhudm/scipy,WillieMaddox/scipy,giorgiop/scipy,Srisai85/scipy,Newman101/scipy,maciejkula/scipy,perimosocordiae/scipy,person142/scipy,josephcslater/scipy,anntzer/scipy,gfyoung/scipy,pyramania/scipy,fredrikw/scipy,grlee77/scipy,kleskjr/scipy,Stefan-Endres/scipy,sonnyhu/scipy,cpaulik/scipy,Kamp9/scipy,ndchorley/scipy,nmayorov/scipy,gef756/scipy,vanpact/scipy,arokem/scipy,ndchorley/scipy,jonycgn/scipy,e-q/scipy,Dapid/scipy,ales-erjavec/scipy,jsilter/scipy,argriffing/scipy,zxsted/scipy,pyramania/scipy,jseabold/scipy,witcxc/scipy,fernand/scipy,sauliusl/scipy,zaxliu/scipy,lhilt/scipy,scipy/scipy,lhilt/scipy,juliantaylor/scipy,bkendzior/scipy,aman-iitj/scipy,Eric89GXL/scipy,cpaulik/scipy,aarchiba/scipy,fernand/scipy,fredrikw/scipy,Kamp9/scipy,zerothi/scipy,pizzathief/scipy,fernand/scipy,jonycgn/scipy,zxsted/scipy,andyfaff/scipy,kleskjr/scipy,mortonjt/scipy,jseabold/scipy,vigna/scipy,e-q/scipy,jakevdp/scipy,vanpact/scipy,mingwpy/scipy,futurulus/scipy,andim/scipy,pnedunuri/scipy,sonnyhu/scipy,matthew-brett/scipy,lukauskas/scipy,vanpact/scipy,jjhelmus/scipy,Srisai85/scipy,jsilter/scipy,argriffing/scipy,bkendzior/scipy,felipebetancur/scipy,hainm/scipy,pyramania/scipy,vberaudi/scipy,apbard/scipy,befelix/scipy,giorgiop/scipy,hainm/scipy,WarrenWeckesser/scipy,fredrikw/scipy,jor-/scipy,larsmans/scipy,ilayn/scipy,josephcslater/scipy,vigna/scipy,matthew-brett/scipy,person142/scipy,futurulus/scipy,zerothi/scipy,vberaudi/scipy,gertingold/scipy,ndchorley/scipy,arokem/scipy,arokem/scipy,raoulbq/scipy,matthewalbani/scipy,ndchorley/scipy,anielsen001/scipy,matthewalbani/scipy,pschella/scipy,cpaulik/scipy,bkendzior/scipy,gdooper/scipy,Shaswat27/scipy,sargas/scipy,Eric89GXL/scipy,aarchiba/scipy,ChanderG/scipy,ortylp/scipy,efiring/scipy,teoliphant/scipy,richardotis/scipy,andyfaff/scipy,jor-/scipy,ilayn/scipy,WarrenWeckesser/scipy,argriffing/scipy,maciejkula/scipy,mtrbean/scipy,futurulus/scipy,scipy/scipy,futurulus/scipy,person142/scipy,ilayn/scipy,aeklant/scipy,Newman101/scipy,nvoron23/scipy,felipebetancur/scipy,dch312/scipy,futurulus/scipy,raoulbq/scipy,lukauskas/scipy,anielsen001/scipy,zaxliu/scipy,efiring/scipy,lukauskas/scipy,minhlongdo/scipy,gef756/scipy,argriffing/scipy,ilayn/scipy,tylerjereddy/scipy,Newman101/scipy,witcxc/scipy,rmcgibbo/scipy,scipy/scipy,haudren/scipy,behzadnouri/scipy,chatcannon/scipy,larsmans/scipy,endolith/scipy,maniteja123/scipy,piyush0609/scipy,mortada/scipy,ortylp/scipy,jakevdp/scipy,Dapid/scipy,woodscn/scipy,newemailjdm/scipy,sauliusl/scipy,vhaasteren/scipy,vberaudi/scipy,FRidh/scipy,tylerjereddy/scipy,pschella/scipy,juliantaylor/scipy,newemailjdm/scipy,petebachant/scipy,chatcannon/scipy,Srisai85/scipy,sauliusl/scipy,mortonjt/scipy,kleskjr/scipy,dch312/scipy,Stefan-Endres/scipy,befelix/scipy,hainm/scipy,ndchorley/scipy,Gillu13/scipy,fernand/scipy,andim/scipy,gef756/scipy,pnedunuri/scipy,Dapid/scipy,jsilter/scipy,dominicelse/scipy,matthewalbani/scipy,mikebenfield/scipy,bkendzior/scipy,WillieMaddox/scipy,Dapid/scipy,jsilter/scipy,niknow/scipy,ortylp/scipy,mortonjt/scipy,pbrod/scipy,jjhelmus/scipy,mortada/scipy,WillieMaddox/scipy,e-q/scipy,Dapid/scipy,mgaitan/scipy,zaxliu/scipy,nmayorov/scipy,maniteja123/scipy,Srisai85/scipy,ogrisel/scipy
|
  import os
  
  from numpy.distutils.misc_util import Configuration
  
  def configuration(parent_package='',top_path=None):
      config = Configuration('misc',parent_package, top_path)
      config.add_data_files('lena.dat')
-     print "########", config
      return config
  
  if __name__ == '__main__':
      from numpy.distutils.core import setup
      setup(**configuration())
|
Remove extra noise on install.
|
## Code Before:
import os

from numpy.distutils.misc_util import Configuration


def configuration(parent_package='',top_path=None):
    config = Configuration('misc',parent_package, top_path)
    config.add_data_files('lena.dat')
    print "########", config
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration())
## Instruction:
Remove extra noise on install.
## Code After:
import os

from numpy.distutils.misc_util import Configuration


def configuration(parent_package='',top_path=None):
    config = Configuration('misc',parent_package, top_path)
    config.add_data_files('lena.dat')
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration())
|
# ... existing code ...
    config.add_data_files('lena.dat')
    return config
# ... rest of the code ...
|
1606445e137ecae5a1f5c50edcc5e851d399b313
|
project_euler/025.1000_digit_fibonacci_number.py
|
project_euler/025.1000_digit_fibonacci_number.py
|
'''
Problem 025
The Fibonacci sequence is defined by the recurrence relation:
Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
Hence the first 12 terms will be:
F1 = 1
F2 = 1
F3 = 2
F4 = 3
F5 = 5
F6 = 8
F7 = 13
F8 = 21
F9 = 34
F10 = 55
F11 = 89
F12 = 144
The 12th term, F12, is the first term to contain three digits.
What is the index of the first term in the Fibonacci sequence to contain 1000 digits?
Solution: Copyright 2017 Dave Cuthbert, MIT License
'''
def gen_fibonacci():
    term_1 = 1
    term_2 = 1
    while True:
        next_term = term_1 + term_2
        yield next
        term_2 = term_1
        term_1 = next_term


def solve_problem():
    get_fibonacci = gen_fibonacci

    for i in range(10):
        print(next(get_fibonacci))

    return("DONE")


if __name__ == "__main__":
    print(solve_problem())
|
'''
Problem 025
The Fibonacci sequence is defined by the recurrence relation:
Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
Hence the first 12 terms will be:
F1 = 1
F2 = 1
F3 = 2
F4 = 3
F5 = 5
F6 = 8
F7 = 13
F8 = 21
F9 = 34
F10 = 55
F11 = 89
F12 = 144
The 12th term, F12, is the first term to contain three digits.
What is the index of the first term in the Fibonacci sequence to contain 1000 digits?
Solution: Copyright 2017 Dave Cuthbert, MIT License
'''
def gen_fibonacci():
    term_1 = 1
    term_2 = 1
    while True:
        next_term = term_1 + term_2
        yield next_term
        term_2 = term_1
        term_1 = next_term


def solve_problem():
    get_fibonacci = gen_fibonacci()

    count = 3 # Not starting with first term in sequence
    while True:
        current_fib = next(get_fibonacci)

        if len(str(current_fib)) >= 1000:
            return(count)

        count +=1


if __name__ == "__main__":
    print(solve_problem())
|
Solve 1000 digit fib number
|
Solve 1000 digit fib number
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
  '''
  Problem 025
  The Fibonacci sequence is defined by the recurrence relation:
  Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
  Hence the first 12 terms will be:
  F1 = 1
  F2 = 1
  F3 = 2
  F4 = 3
  F5 = 5
  F6 = 8
  F7 = 13
  F8 = 21
  F9 = 34
  F10 = 55
  F11 = 89
  F12 = 144
  The 12th term, F12, is the first term to contain three digits.
  What is the index of the first term in the Fibonacci sequence to contain 1000 digits?
  Solution: Copyright 2017 Dave Cuthbert, MIT License
  '''
  def gen_fibonacci():
      term_1 = 1
      term_2 = 1
      while True:
          next_term = term_1 + term_2
-         yield next
+         yield next_term
          term_2 = term_1
          term_1 = next_term
  
  def solve_problem():
-     get_fibonacci = gen_fibonacci
+     get_fibonacci = gen_fibonacci()
-     for i in range(10):
+ 
+     count = 3 # Not starting with first term in sequence
+     while True:
-         print(next(get_fibonacci))
+         current_fib = next(get_fibonacci)
- 
-     return("DONE")
+         if len(str(current_fib)) >= 1000:
+             return(count)
+ 
+         count +=1
  
  if __name__ == "__main__":
      print(solve_problem())
|
Solve 1000 digit fib number
|
## Code Before:
'''
Problem 025
The Fibonacci sequence is defined by the recurrence relation:
Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
Hence the first 12 terms will be:
F1 = 1
F2 = 1
F3 = 2
F4 = 3
F5 = 5
F6 = 8
F7 = 13
F8 = 21
F9 = 34
F10 = 55
F11 = 89
F12 = 144
The 12th term, F12, is the first term to contain three digits.
What is the index of the first term in the Fibonacci sequence to contain 1000 digits?
Solution: Copyright 2017 Dave Cuthbert, MIT License
'''
def gen_fibonacci():
    term_1 = 1
    term_2 = 1
    while True:
        next_term = term_1 + term_2
        yield next
        term_2 = term_1
        term_1 = next_term


def solve_problem():
    get_fibonacci = gen_fibonacci

    for i in range(10):
        print(next(get_fibonacci))

    return("DONE")


if __name__ == "__main__":
    print(solve_problem())
## Instruction:
Solve 1000 digit fib number
## Code After:
'''
Problem 025
The Fibonacci sequence is defined by the recurrence relation:
Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1.
Hence the first 12 terms will be:
F1 = 1
F2 = 1
F3 = 2
F4 = 3
F5 = 5
F6 = 8
F7 = 13
F8 = 21
F9 = 34
F10 = 55
F11 = 89
F12 = 144
The 12th term, F12, is the first term to contain three digits.
What is the index of the first term in the Fibonacci sequence to contain 1000 digits?
Solution: Copyright 2017 Dave Cuthbert, MIT License
'''
def gen_fibonacci():
    term_1 = 1
    term_2 = 1
    while True:
        next_term = term_1 + term_2
        yield next_term
        term_2 = term_1
        term_1 = next_term


def solve_problem():
    get_fibonacci = gen_fibonacci()

    count = 3 # Not starting with first term in sequence
    while True:
        current_fib = next(get_fibonacci)

        if len(str(current_fib)) >= 1000:
            return(count)

        count +=1


if __name__ == "__main__":
    print(solve_problem())
|
// ... existing code ...
        next_term = term_1 + term_2
        yield next_term
        term_2 = term_1
// ... modified code ...
def solve_problem():
    get_fibonacci = gen_fibonacci()

    count = 3 # Not starting with first term in sequence
    while True:
        current_fib = next(get_fibonacci)

        if len(str(current_fib)) >= 1000:
            return(count)

        count +=1
// ... rest of the code ...
|
52bb18cf1249e3f48764a7ed4e9546439692c5cb
|
packages/Python/lldbsuite/test/functionalities/data-formatter/synthcapping/fooSynthProvider.py
|
packages/Python/lldbsuite/test/functionalities/data-formatter/synthcapping/fooSynthProvider.py
|
import lldb


class fooSynthProvider:
    def __init__(self, valobj, dict):
        self.valobj = valobj;
        self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)

    def num_children(self):
        return 3;

    def get_child_at_index(self, index):
        if index == 0:
            child = self.valobj.GetChildMemberWithName('a');
        if index == 1:
            child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
        if index == 2:
            child = self.valobj.GetChildMemberWithName('r');
        return child;

    def get_child_index(self, name):
        if name == 'a':
            return 0;
        if name == 'fake_a':
            return 1;
        return 2;
|
import lldb


class fooSynthProvider:
    def __init__(self, valobj, dict):
        self.valobj = valobj;
        self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)

    def num_children(self):
        return 3;

    def get_child_at_index(self, index):
        if index == 0:
            child = self.valobj.GetChildMemberWithName('a');
        if index == 1:
            child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
        if index == 2:
            child = self.valobj.GetChildMemberWithName('r');
        return child;

    def get_child_index(self, name):
        if name == 'a':
            return 0;
        if name == 'fake_a':
            return 1;
        return 2;
|
Fix TestSyntheticCapping for Python 3.
|
Fix TestSyntheticCapping for Python 3.
In Python 3, whitespace inconsistences are errors. This synthetic
provider had mixed tabs and spaces, as well as inconsistent
indentation widths. This led to the file not being imported,
and naturally the test failing. No functional change here, just
whitespace.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@258751 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb
|
  import lldb
  
  class fooSynthProvider:
-     def __init__(self, valobj, dict):
+     def __init__(self, valobj, dict):
-         self.valobj = valobj;
+         self.valobj = valobj;
-         self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)
+         self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)
-     def num_children(self):
+     def num_children(self):
-         return 3;
+         return 3;
-     def get_child_at_index(self, index):
+     def get_child_at_index(self, index):
-         if index == 0:
+         if index == 0:
-             child = self.valobj.GetChildMemberWithName('a');
+             child = self.valobj.GetChildMemberWithName('a');
-         if index == 1:
+         if index == 1:
-             child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
+             child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
-         if index == 2:
+         if index == 2:
-             child = self.valobj.GetChildMemberWithName('r');
+             child = self.valobj.GetChildMemberWithName('r');
-         return child;
+         return child;
-     def get_child_index(self, name):
+     def get_child_index(self, name):
-         if name == 'a':
+         if name == 'a':
-             return 0;
+             return 0;
-         if name == 'fake_a':
+         if name == 'fake_a':
-             return 1;
+             return 1;
-         return 2;
+         return 2;
|
Fix TestSyntheticCapping for Python 3.
|
## Code Before:
import lldb


class fooSynthProvider:
    def __init__(self, valobj, dict):
        self.valobj = valobj;
        self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)

    def num_children(self):
        return 3;

    def get_child_at_index(self, index):
        if index == 0:
            child = self.valobj.GetChildMemberWithName('a');
        if index == 1:
            child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
        if index == 2:
            child = self.valobj.GetChildMemberWithName('r');
        return child;

    def get_child_index(self, name):
        if name == 'a':
            return 0;
        if name == 'fake_a':
            return 1;
        return 2;
## Instruction:
Fix TestSyntheticCapping for Python 3.
## Code After:
import lldb


class fooSynthProvider:
    def __init__(self, valobj, dict):
        self.valobj = valobj;
        self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)

    def num_children(self):
        return 3;

    def get_child_at_index(self, index):
        if index == 0:
            child = self.valobj.GetChildMemberWithName('a');
        if index == 1:
            child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
        if index == 2:
            child = self.valobj.GetChildMemberWithName('r');
        return child;

    def get_child_index(self, name):
        if name == 'a':
            return 0;
        if name == 'fake_a':
            return 1;
        return 2;
|
// ... existing code ...
class fooSynthProvider:
    def __init__(self, valobj, dict):
        self.valobj = valobj;
        self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt)

    def num_children(self):
        return 3;

    def get_child_at_index(self, index):
        if index == 0:
            child = self.valobj.GetChildMemberWithName('a');
        if index == 1:
            child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type);
        if index == 2:
            child = self.valobj.GetChildMemberWithName('r');
        return child;

    def get_child_index(self, name):
        if name == 'a':
            return 0;
        if name == 'fake_a':
            return 1;
        return 2;
// ... rest of the code ...
|
d3675b777dc95f296f26bdd9b8b05311ceac6ba5
|
cyder/core/system/migrations/0006_rename_table_from_system_key_value_to_system_kv.py
|
cyder/core/system/migrations/0006_rename_table_from_system_key_value_to_system_kv.py
|
from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_table('system_key_value', 'system_kv')

    def backwards(self, orm):
        db.rename_table('system_kv', 'system_key_value')
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_table('system_key_value', 'system_kv')

    def backwards(self, orm):
        db.rename_table('system_kv', 'system_key_value')

    models = {
        'system.system': {
            'Meta': {'object_name': 'System', 'db_table': "'system'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'system.systemkeyvalue': {
            'Meta': {'unique_together': "(('key', 'value', 'system'),)", 'object_name': 'SystemKeyValue', 'db_table': "'system_kv'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_quoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']"}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['system']
|
Add ORM freeze thing to SystemKeyValue migration
|
Add ORM freeze thing to SystemKeyValue migration
|
Python
|
bsd-3-clause
|
akeym/cyder,murrown/cyder,zeeman/cyder,akeym/cyder,OSU-Net/cyder,murrown/cyder,OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,akeym/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder
|
+ import datetime
  from south.db import db
  from south.v2 import SchemaMigration
+ from django.db import models
  
  class Migration(SchemaMigration):
  
      def forwards(self, orm):
          db.rename_table('system_key_value', 'system_kv')
  
      def backwards(self, orm):
          db.rename_table('system_kv', 'system_key_value')
  
+     models = {
+         'system.system': {
+             'Meta': {'object_name': 'System', 'db_table': "'system'"},
+             'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+             'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+             'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+             'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+         },
+         'system.systemkeyvalue': {
+             'Meta': {'unique_together': "(('key', 'value', 'system'),)", 'object_name': 'SystemKeyValue', 'db_table': "'system_kv'"},
+             'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+             'is_quoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+             'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+             'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']"}),
+             'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+         }
+     }
+ 
+     complete_apps = ['system']
+ 
|
Add ORM freeze thing to SystemKeyValue migration
|
## Code Before:
from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_table('system_key_value', 'system_kv')

    def backwards(self, orm):
        db.rename_table('system_kv', 'system_key_value')
## Instruction:
Add ORM freeze thing to SystemKeyValue migration
## Code After:
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_table('system_key_value', 'system_kv')

    def backwards(self, orm):
        db.rename_table('system_kv', 'system_key_value')

    models = {
        'system.system': {
            'Meta': {'object_name': 'System', 'db_table': "'system'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'system.systemkeyvalue': {
            'Meta': {'unique_together': "(('key', 'value', 'system'),)", 'object_name': 'SystemKeyValue', 'db_table': "'system_kv'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_quoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']"}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['system']
|
...
import datetime
from south.db import db
...
from south.v2 import SchemaMigration
from django.db import models
...
        db.rename_table('system_kv', 'system_key_value')

    models = {
        'system.system': {
            'Meta': {'object_name': 'System', 'db_table': "'system'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'system.systemkeyvalue': {
            'Meta': {'unique_together': "(('key', 'value', 'system'),)", 'object_name': 'SystemKeyValue', 'db_table': "'system_kv'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_quoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']"}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['system']
...
|
cd374366dc6d49cc543a037fba8398e5b724c382
|
tabula/util.py
|
tabula/util.py
|
import warnings
import platform


def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emmitted
    when the function is used."""

    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)

    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc


def deprecated_option(option):
    warnings.warn("Call to deprecated option {}.".format(option),
                  category=DeprecationWarning, stacklevel=2)


def java_version():
    import subprocess
    try:
        res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
        res = res.decode()
    except subprocess.CalledProcessError as e:
        res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`"

    return res


def environment_info():
    import sys
    import distro
    import textwrap
    from .__version__ import __version__

    print("""Python version:
{}
Java version:
{}
tabula-py version: {}
platform: {}
uname:
{}
linux_distribution: {}
mac_ver: {}
""".format(
        sys.version,
        textwrap.indent(java_version().strip(), " "),
        __version__,
        platform.platform(),
        str(platform.uname()),
        distro.linux_distribution(),
        platform.mac_ver(),
    ))
|
import warnings
import platform


def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emmitted
    when the function is used."""

    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)

    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc


def deprecated_option(option):
    warnings.warn("Call to deprecated option {}.".format(option),
                  category=DeprecationWarning, stacklevel=2)


def java_version():
    import subprocess
    try:
        res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
        res = res.decode()
    except subprocess.CalledProcessError as e:
        res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`"

    return res


def environment_info():
    import sys
    import distro
    from .__version__ import __version__

    print("""Python version:
{}
Java version:
{}
tabula-py version: {}
platform: {}
uname:
{}
linux_distribution: {}
mac_ver: {}
""".format(
        sys.version,
        java_version().strip(),
        __version__,
        platform.platform(),
        str(platform.uname()),
        distro.linux_distribution(),
        platform.mac_ver(),
    ))
|
Remove textwrap because python 2.7 lacks indent() function
|
Remove textwrap because python 2.7 lacks indent() function
|
Python
|
mit
|
chezou/tabula-py
|
  import warnings
  import platform
  
  
  def deprecated(func):
      """This is a decorator which can be used to mark functions
      as deprecated. It will result in a warning being emmitted
      when the function is used."""
  
      def newFunc(*args, **kwargs):
          warnings.warn("Call to deprecated function {}.".format(func.__name__),
                        category=DeprecationWarning, stacklevel=2)
          return func(*args, **kwargs)
  
      newFunc.__name__ = func.__name__
      newFunc.__doc__ = func.__doc__
      newFunc.__dict__.update(func.__dict__)
      return newFunc
  
  
  def deprecated_option(option):
      warnings.warn("Call to deprecated option {}.".format(option),
                    category=DeprecationWarning, stacklevel=2)
  
  
  def java_version():
      import subprocess
      try:
          res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
          res = res.decode()
      except subprocess.CalledProcessError as e:
          res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`"
  
      return res
  
  
  def environment_info():
      import sys
      import distro
-     import textwrap
      from .__version__ import __version__
  
      print("""Python version:
  {}
  Java version:
- {}
+ {}
  tabula-py version: {}
  platform: {}
  uname:
  {}
  linux_distribution: {}
  mac_ver: {}
  """.format(
          sys.version,
-         textwrap.indent(java_version().strip(), " "),
+         java_version().strip(),
          __version__,
          platform.platform(),
          str(platform.uname()),
          distro.linux_distribution(),
          platform.mac_ver(),
      ))
|
Remove textwrap because python 2.7 lacks indent() function
|
## Code Before:
import warnings
import platform


def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emmitted
    when the function is used."""

    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)

    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc


def deprecated_option(option):
    warnings.warn("Call to deprecated option {}.".format(option),
                  category=DeprecationWarning, stacklevel=2)


def java_version():
    import subprocess
    try:
        res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
        res = res.decode()
    except subprocess.CalledProcessError as e:
        res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`"

    return res


def environment_info():
    import sys
    import distro
    import textwrap
    from .__version__ import __version__

    print("""Python version:
{}
Java version:
{}
tabula-py version: {}
platform: {}
uname:
{}
linux_distribution: {}
mac_ver: {}
""".format(
        sys.version,
        textwrap.indent(java_version().strip(), " "),
        __version__,
        platform.platform(),
        str(platform.uname()),
        distro.linux_distribution(),
        platform.mac_ver(),
    ))
## Instruction:
Remove textwrap because python 2.7 lacks indent() function
## Code After:
import warnings
import platform


def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emmitted
    when the function is used."""

    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)

    newFunc.__name__ = func.__name__
    newFunc.__doc__ = func.__doc__
    newFunc.__dict__.update(func.__dict__)
    return newFunc


def deprecated_option(option):
    warnings.warn("Call to deprecated option {}.".format(option),
                  category=DeprecationWarning, stacklevel=2)


def java_version():
    import subprocess
    try:
        res = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT)
        res = res.decode()
    except subprocess.CalledProcessError as e:
        res = "`java -version` faild. `java` command is not found from this Python process. Please ensure to set PATH for `java`"

    return res


def environment_info():
    import sys
    import distro
    from .__version__ import __version__

    print("""Python version:
{}
Java version:
{}
tabula-py version: {}
platform: {}
uname:
{}
linux_distribution: {}
mac_ver: {}
""".format(
        sys.version,
        java_version().strip(),
        __version__,
        platform.platform(),
        str(platform.uname()),
        distro.linux_distribution(),
        platform.mac_ver(),
    ))
|
...
    import distro
    from .__version__ import __version__
...
Java version:
{}
tabula-py version: {}
...
        sys.version,
        java_version().strip(),
        __version__,
...
|
ca042edc7f9709f2217b669fb5a68e9aac3ab61c
|
cbv/management/commands/cbv_dumpversion.py
|
cbv/management/commands/cbv_dumpversion.py
|
from django.core.management import call_command
from django.core.management.commands import LabelCommand


class Command(LabelCommand):
    def handle_label(self, label, **options):
        # Because django will use the default manager of each model, we
        # monkeypatch the manager to filter by our label before calling
        # the dumpdata command to dump only the subset of data we want.

        # Set the

        # Call the dumpdata command.
        call_command('dumpdata', 'cbv')
|
import json

from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers

from cbv import models


class Command(LabelCommand):
    """Dump the django cbv app data for a specific version."""

    def handle_label(self, label, **options):
        filtered_models = {
            models.ProjectVersion: 'version_number',
            models.Module: 'project_version__version_number',
            models.ModuleAttribute: 'module__project_version__version_number',
            models.Function: 'module__project_version__version_number',
            models.Klass: 'module__project_version__version_number',
            models.KlassAttribute: 'klass__module__project_version__version_number',
            models.Method: 'klass__module__project_version__version_number',
        }
        objects = []
        for model, version_arg in filtered_models.items():
            filter_kwargs = {version_arg: label}
            result = model.objects.filter(**filter_kwargs)
            objects = objects + list(result)
        for obj in objects:
            obj.pk = None
        dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
        self.stdout.write(dump)
|
Allow dumpdata of specific version of cbv.
|
Allow dumpdata of specific version of cbv.
|
Python
|
bsd-2-clause
|
abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector
|
+ import json
+ 
+ from django.db.models.query import QuerySet
  from django.core.management import call_command
- from django.core.management.commands import LabelCommand
+ from django.core.management.base import LabelCommand
+ from django.core import serializers
+ 
+ from cbv import models
  
  
  class Command(LabelCommand):
+     """Dump the django cbv app data for a specific version."""
      def handle_label(self, label, **options):
-         # Because django will use the default manager of each model, we
-         # monkeypatch the manager to filter by our label before calling
-         # the dumpdata command to dump only the subset of data we want.
+         filtered_models = {
+             models.ProjectVersion: 'version_number',
+             models.Module: 'project_version__version_number',
+             models.ModuleAttribute: 'module__project_version__version_number',
+             models.Function: 'module__project_version__version_number',
+             models.Klass: 'module__project_version__version_number',
+             models.KlassAttribute: 'klass__module__project_version__version_number',
+             models.Method: 'klass__module__project_version__version_number',
+         }
+         objects = []
+         for model, version_arg in filtered_models.items():
+             filter_kwargs = {version_arg: label}
+             result = model.objects.filter(**filter_kwargs)
+             objects = objects + list(result)
+         for obj in objects:
+             obj.pk = None
+         dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
+         self.stdout.write(dump)
-         # Set the
- 
-         # Call the dumpdata command.
-         call_command('dumpdata', 'cbv')
- 
|
Allow dumpdata of specific version of cbv.
|
## Code Before:
from django.core.management import call_command
from django.core.management.commands import LabelCommand


class Command(LabelCommand):
    def handle_label(self, label, **options):
        # Because django will use the default manager of each model, we
        # monkeypatch the manager to filter by our label before calling
        # the dumpdata command to dump only the subset of data we want.

        # Set the

        # Call the dumpdata command.
        call_command('dumpdata', 'cbv')
## Instruction:
Allow dumpdata of specific version of cbv.
## Code After:
import json

from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers

from cbv import models


class Command(LabelCommand):
    """Dump the django cbv app data for a specific version."""

    def handle_label(self, label, **options):
        filtered_models = {
            models.ProjectVersion: 'version_number',
            models.Module: 'project_version__version_number',
            models.ModuleAttribute: 'module__project_version__version_number',
            models.Function: 'module__project_version__version_number',
            models.Klass: 'module__project_version__version_number',
            models.KlassAttribute: 'klass__module__project_version__version_number',
            models.Method: 'klass__module__project_version__version_number',
        }
        objects = []
        for model, version_arg in filtered_models.items():
            filter_kwargs = {version_arg: label}
            result = model.objects.filter(**filter_kwargs)
            objects = objects + list(result)
        for obj in objects:
            obj.pk = None
        dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
        self.stdout.write(dump)
|
// ... existing code ...
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
// ... modified code ...
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
// ... rest of the code ...
|
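
A minimal re-creation of the pattern in the record above, for readers skimming the dump: filter the per-version rows, clear primary keys so loaddata resolves rows by natural key, then serialize. The `library` app and `Book` model are hypothetical stand-ins for the cbv models, and `use_natural_keys` matches the record's Django era (newer Django splits it into `use_natural_foreign_keys`/`use_natural_primary_keys`).

from django.core import serializers
from django.core.management.base import LabelCommand

from library.models import Book  # hypothetical app and model


class Command(LabelCommand):
    """Dump every Book belonging to one version as a JSON fixture."""

    def handle_label(self, label, **options):
        books = list(Book.objects.filter(version=label))
        for book in books:
            # With pk cleared, loaddata matches rows via natural keys
            # instead of colliding with whatever ids already exist.
            book.pk = None
        self.stdout.write(
            serializers.serialize('json', books, indent=1,
                                  use_natural_keys=True))
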
90ade823700da61824c113759f847bf08823c148
|
nova/objects/__init__.py
|
nova/objects/__init__.py
|
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
|
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
Add security_group_rule to objects registry
|
Add security_group_rule to objects registry
This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.
We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.
Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816
|
Python
|
apache-2.0
|
citrix-openstack-build/oslo.versionedobjects,openstack/oslo.versionedobjects
|
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
+ __import__('nova.objects.security_group_rule')
|
Add security_group_rule to objects registry
|
## Code Before:
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
## Instruction:
Add security_group_rule to objects registry
## Code After:
def register_all():
# NOTE(danms): You must make sure your object gets imported in this
# function in order for it to be registered by services that may
# need to receive it via RPC.
__import__('nova.objects.dns_domain')
__import__('nova.objects.instance')
__import__('nova.objects.instance_info_cache')
__import__('nova.objects.security_group')
__import__('nova.objects.migration')
__import__('nova.objects.quotas')
__import__('nova.objects.virtual_interface')
__import__('nova.objects.network')
__import__('nova.objects.block_device')
__import__('nova.objects.fixed_ip')
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
|
// ... existing code ...
__import__('nova.objects.floating_ip')
__import__('nova.objects.security_group_rule')
// ... rest of the code ...
|
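
The nova change above works because importing a module is what registers its classes: decorators (or metaclass hooks) run at import time. A self-contained sketch of that register-by-import idea, with illustrative names rather than nova's:

import importlib

REGISTRY = {}

def register(cls):
    # Runs when the defining module is imported, which is why
    # register_all() only needs a list of __import__ calls.
    REGISTRY[cls.__name__] = cls
    return cls

@register
class SecurityGroupRule:
    pass

def register_module(dotted_name):
    # The modern spelling of nova's __import__('nova.objects.x') calls.
    importlib.import_module(dotted_name)

assert 'SecurityGroupRule' in REGISTRY

Forgetting one import, as the commit message explains, means a service could receive an object over RPC that it never registered.
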
c1dfbc8e8b3ae29436c584d906636ea541dfb6a8
|
apps/storybase_asset/embedable_resource/__init__.py
|
apps/storybase_asset/embedable_resource/__init__.py
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
Allow embedding of Google Docs by URL
|
Allow embedding of Google Docs by URL
|
Python
|
mit
|
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
|
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
+
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
- def get_html(self, url):
+ def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
+
+ class GoogleMapProvider(EmbedableResourceProvider):
+ def get_html(self, url, width=425, height=350):
+ if not self.match(url):
+ raise UrlNotMatched
+
+ return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
+
EmbedableResource.register(GoogleSpreadsheetProvider)
+ EmbedableResource.register(GoogleMapProvider)
|
Allow embedding of Google Docs by URL
|
## Code Before:
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
EmbedableResource.register(GoogleSpreadsheetProvider)
## Instruction:
Allow embedding of Google Docs by URL
## Code After:
import re
from exceptions import UrlNotMatched
class EmbedableResource(object):
resource_providers = []
@classmethod
def register(cls, provider_cls):
cls.resource_providers.append(provider_cls)
@classmethod
def get_html(cls, url):
for provider_cls in cls.resource_providers:
provider = provider_cls()
try:
return provider.get_html(url)
except UrlNotMatched:
pass
raise UrlNotMatched
class EmbedableResourceProvider(object):
url_pattern = r''
def match(self, url):
return re.match(self.url_pattern, url) is not None
def get_html(self, url):
raise UrlNotMatched
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
raise UrlNotMatched
return "<iframe width='500' height='300' frameborder='0' src='%s&widget=true'></iframe>" % url
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
|
// ... existing code ...
class GoogleSpreadsheetProvider(EmbedableResourceProvider):
// ... modified code ...
url_pattern = r'^https://docs.google.com/spreadsheet/pub\?key=[0-9a-zA-Z]+'
def get_html(self, url, width=500, height=300):
if not self.match(url):
...
class GoogleMapProvider(EmbedableResourceProvider):
def get_html(self, url, width=425, height=350):
if not self.match(url):
raise UrlNotMatched
return '<iframe width="%d" height="%d" frameborder="0" scrolling="no" marginheight="0" marginwidth="0" src="%s&output=embed"></iframe><br /><small><a href="%s&source=embed" style="color:#0000FF;text-align:left">View Larger Map</a></small>' % (width, height, url, url)
EmbedableResource.register(GoogleSpreadsheetProvider)
EmbedableResource.register(GoogleMapProvider)
// ... rest of the code ...
|
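
The provider chain above is a small chain-of-responsibility: each registered provider either returns HTML or raises UrlNotMatched so the next one is tried. A compact, runnable sketch with a hypothetical URL pattern and markup:

import re

class UrlNotMatched(Exception):
    pass

PROVIDERS = []

def register(cls):
    PROVIDERS.append(cls())
    return cls

@register
class MapProvider:
    url_pattern = r'^https://maps\.example\.com/'  # hypothetical pattern

    def get_html(self, url, width=425, height=350):
        if re.match(self.url_pattern, url) is None:
            raise UrlNotMatched
        return '<iframe width="%d" height="%d" src="%s"></iframe>' % (
            width, height, url)

def embed(url):
    # First provider that matches wins, mirroring EmbedableResource.
    for provider in PROVIDERS:
        try:
            return provider.get_html(url)
        except UrlNotMatched:
            continue
    raise UrlNotMatched(url)

print(embed('https://maps.example.com/?q=denver'))

One detail worth noticing in the record: GoogleMapProvider never overrides url_pattern, so it inherits the base class's empty pattern, and re.match('', url) succeeds for any URL; it only behaves because it is registered after the spreadsheet provider.
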
ab3dc6466b617a5bf5a0bec2c122eca645c1d29f
|
cloudera-framework-assembly/src/main/resources/python/script_util.py
|
cloudera-framework-assembly/src/main/resources/python/script_util.py
|
import os
def hdfs_make_qualified(path):
return path if 'CF_HADOOP_DEFAULT_FS' not in os.environ else os.environ['CF_HADOOP_DEFAULT_FS'] + path
|
import os
import re
def hdfs_make_qualified(path):
return path if (re.match(r'[.]*://[.]*', path) or 'CF_HADOOP_DEFAULT_FS' not in os.environ) \
else os.environ['CF_HADOOP_DEFAULT_FS'] + path
|
Update python script util to detect if paths are already fully qualified
|
Update python script util to detect if paths are already fully qualified
|
Python
|
apache-2.0
|
ggear/cloudera-framework,ggear/cloudera-framework,ggear/cloudera-framework
|
import os
+ import re
def hdfs_make_qualified(path):
- return path if 'CF_HADOOP_DEFAULT_FS' not in os.environ else os.environ['CF_HADOOP_DEFAULT_FS'] + path
+ return path if (re.match(r'[.]*://[.]*', path) or 'CF_HADOOP_DEFAULT_FS' not in os.environ) \
+ else os.environ['CF_HADOOP_DEFAULT_FS'] + path
|
Update python script util to detect if paths are already fully qualified
|
## Code Before:
import os
def hdfs_make_qualified(path):
return path if 'CF_HADOOP_DEFAULT_FS' not in os.environ else os.environ['CF_HADOOP_DEFAULT_FS'] + path
## Instruction:
Update python script util to detect if paths are already fully qualified
## Code After:
import os
import re
def hdfs_make_qualified(path):
return path if (re.match(r'[.]*://[.]*', path) or 'CF_HADOOP_DEFAULT_FS' not in os.environ) \
else os.environ['CF_HADOOP_DEFAULT_FS'] + path
|
...
import os
import re
...
def hdfs_make_qualified(path):
return path if (re.match(r'[.]*://[.]*', path) or 'CF_HADOOP_DEFAULT_FS' not in os.environ) \
else os.environ['CF_HADOOP_DEFAULT_FS'] + path
...
|
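
A caution when reusing the pattern above: inside a character class, [.] is a literal dot, so r'[.]*://[.]*' only matches paths that begin with dots (or with :// itself), since re.match anchors at the start; hdfs://... does not match it. A sketch of a scheme check in the spirit of the change, offered as an assumption rather than the project's code:

import os
import re

_SCHEME = re.compile(r'^[A-Za-z][A-Za-z0-9+.-]*://')  # RFC 3986-style scheme

def hdfs_make_qualified(path):
    # Leave scheme-qualified URIs (hdfs://..., s3a://...) untouched;
    # otherwise prepend the default filesystem when one is configured.
    if _SCHEME.match(path) or 'CF_HADOOP_DEFAULT_FS' not in os.environ:
        return path
    return os.environ['CF_HADOOP_DEFAULT_FS'] + path

os.environ['CF_HADOOP_DEFAULT_FS'] = 'hdfs://namenode:8020'
print(hdfs_make_qualified('/tmp/data'))          # hdfs://namenode:8020/tmp/data
print(hdfs_make_qualified('s3a://bucket/data'))  # unchanged
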
4bc55a6b1bdef357acd24e6aba34a57f689e9da0
|
bokeh/command/subcommands/__init__.py
|
bokeh/command/subcommands/__init__.py
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
return results
all = _collect()
del _collect
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
results = sorted(results, key=lambda attr: attr.name)
return results
all = _collect()
del _collect
|
Sort subcommands.all so the tested results are deterministic
|
Sort subcommands.all so the tested results are deterministic
|
Python
|
bsd-3-clause
|
phobson/bokeh,clairetang6/bokeh,aiguofer/bokeh,jakirkham/bokeh,msarahan/bokeh,mindriot101/bokeh,philippjfr/bokeh,schoolie/bokeh,stonebig/bokeh,azjps/bokeh,percyfal/bokeh,bokeh/bokeh,draperjames/bokeh,percyfal/bokeh,draperjames/bokeh,msarahan/bokeh,ptitjano/bokeh,msarahan/bokeh,quasiben/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,bokeh/bokeh,justacec/bokeh,dennisobrien/bokeh,ericmjl/bokeh,philippjfr/bokeh,dennisobrien/bokeh,aavanian/bokeh,azjps/bokeh,clairetang6/bokeh,ptitjano/bokeh,ericmjl/bokeh,azjps/bokeh,timsnyder/bokeh,timsnyder/bokeh,rs2/bokeh,mindriot101/bokeh,clairetang6/bokeh,jakirkham/bokeh,DuCorey/bokeh,bokeh/bokeh,azjps/bokeh,mindriot101/bokeh,aavanian/bokeh,aavanian/bokeh,azjps/bokeh,bokeh/bokeh,clairetang6/bokeh,aavanian/bokeh,philippjfr/bokeh,justacec/bokeh,aiguofer/bokeh,mindriot101/bokeh,schoolie/bokeh,draperjames/bokeh,phobson/bokeh,DuCorey/bokeh,aiguofer/bokeh,philippjfr/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,rs2/bokeh,stonebig/bokeh,ericmjl/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,jakirkham/bokeh,DuCorey/bokeh,jakirkham/bokeh,ptitjano/bokeh,quasiben/bokeh,percyfal/bokeh,percyfal/bokeh,justacec/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,rs2/bokeh,phobson/bokeh,justacec/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,schoolie/bokeh,phobson/bokeh,rs2/bokeh,bokeh/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,ptitjano/bokeh,phobson/bokeh,dennisobrien/bokeh,philippjfr/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,msarahan/bokeh,aiguofer/bokeh,stonebig/bokeh,DuCorey/bokeh,draperjames/bokeh,jakirkham/bokeh,aavanian/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,timsnyder/bokeh,draperjames/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh
|
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
+ results = sorted(results, key=lambda attr: attr.name)
+
return results
all = _collect()
del _collect
|
Sort subcommands.all so the tested results are deterministic
|
## Code Before:
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
return results
all = _collect()
del _collect
## Instruction:
Sort subcommands.all so the tested results are deterministic
## Code After:
def _collect():
from importlib import import_module
from os import listdir
from os.path import dirname
from ..subcommand import Subcommand
results = []
for file in listdir(dirname(__file__)):
if not file.endswith(".py") or file in ("__init__.py", "__main__.py"):
continue
modname = file.rstrip(".py")
mod = import_module("." + modname, __package__)
for name in dir(mod):
attr = getattr(mod, name)
if isinstance(attr, type) and issubclass(attr, Subcommand):
if not hasattr(attr, 'name'): continue # excludes abstract bases
results.append(attr)
results = sorted(results, key=lambda attr: attr.name)
return results
all = _collect()
del _collect
|
...
results = sorted(results, key=lambda attr: attr.name)
return results
...
|
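
Two things generalize from the bokeh record: discovery order from listdir is filesystem-dependent (hence the sort), and file.rstrip(".py") strips a character set, so a module named copy.py would come out as "co". A sketch using pkgutil that sidesteps both; the package and Subcommand names passed in are hypothetical:

import importlib
import pkgutil

def collect(package, base_cls):
    found = []
    for info in pkgutil.iter_modules(package.__path__):
        mod = importlib.import_module('.' + info.name, package.__name__)
        for attr in vars(mod).values():
            if (isinstance(attr, type) and issubclass(attr, base_cls)
                    and hasattr(attr, 'name')):   # skip abstract bases
                found.append(attr)
    # Sorting by subcommand name keeps the list stable across platforms.
    return sorted(found, key=lambda cls: cls.name)
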
16516b1ec44e3e44d2dc96a6f3d021268ce4e71d
|
osgtest/tests/test_84_xrootd.py
|
osgtest/tests/test_84_xrootd.py
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(core.state['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
|
Fix test if server started in xrootd cleanup code
|
Fix test if server started in xrootd cleanup code
git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17920 4e558342-562e-0410-864c-e07659590f8c
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
- self.skip_ok_if(['xrootd.started-server'] == False, 'did not start server')
+ self.skip_ok_if(core.state['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
|
Fix test if server started in xrootd cleanup code
|
## Code Before:
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
## Instruction:
Fix test if server started in xrootd cleanup code
## Code After:
import os
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
import unittest
class TestStopXrootd(osgunittest.OSGTestCase):
def test_01_stop_xrootd(self):
if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True):
files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd")
files.restore('/etc/xrootd/auth_file',"xrootd")
files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd")
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(core.state['xrootd.started-server'] == False, 'did not start server')
command = ('service', 'xrootd', 'stop')
stdout, _, fail = core.check_system(command, 'Stop Xrootd server')
self.assert_(stdout.find('FAILED') == -1, fail)
self.assert_(not os.path.exists(core.config['xrootd.pid-file']),
'Xrootd server PID file still present')
|
// ... existing code ...
core.skip_ok_unless_installed('xrootd')
self.skip_ok_if(core.state['xrootd.started-server'] == False, 'did not start server')
// ... rest of the code ...
|
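
The fix above is worth spelling out, because the broken form fails silently: a bare ['xrootd.started-server'] is a list literal, not a lookup into core.state, so the equality test can never be True and the skip never fires. A two-line demonstration:

# A list literal compared to a bool is always unequal.
state = {'xrootd.started-server': False}

print(['xrootd.started-server'] == False)        # False: list vs. bool
print(state['xrootd.started-server'] == False)   # True: the intended lookup
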
0613c115f0ffccda8c6de9021c44d11085d84a1b
|
simplesqlite/_logger.py
|
simplesqlite/_logger.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import logbook
logger = logbook.Logger("SimpleSQLie")
logger.disable()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
from __future__ import absolute_import
from __future__ import unicode_literals
import dataproperty
import logbook
import pytablereader
logger = logbook.Logger("SimpleSQLie")
logger.disable()
def set_logger(is_enable):
if is_enable != logger.disabled:
return
if is_enable:
logger.enable()
else:
logger.disable()
dataproperty.set_logger(is_enable)
pytablereader.set_logger(is_enable)
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
dataproperty.set_log_level(log_level)
pytablereader.set_log_level(log_level)
|
Modify to avoid excessive logger initialization
|
Modify to avoid excessive logger initialization
|
Python
|
mit
|
thombashi/SimpleSQLite,thombashi/SimpleSQLite
|
from __future__ import absolute_import
from __future__ import unicode_literals
+ import dataproperty
import logbook
+ import pytablereader
logger = logbook.Logger("SimpleSQLie")
logger.disable()
def set_logger(is_enable):
+ if is_enable != logger.disabled:
+ return
+
if is_enable:
logger.enable()
else:
logger.disable()
+
+ dataproperty.set_logger(is_enable)
+ pytablereader.set_logger(is_enable)
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
+ if log_level == logger.level:
+ return
+
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
+ dataproperty.set_log_level(log_level)
+ pytablereader.set_log_level(log_level)
+
|
Modify to avoid excessive logger initialization
|
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
import logbook
logger = logbook.Logger("SimpleSQLie")
logger.disable()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
## Instruction:
Modify to avoid excessive logger initialization
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
import dataproperty
import logbook
import pytablereader
logger = logbook.Logger("SimpleSQLie")
logger.disable()
def set_logger(is_enable):
if is_enable != logger.disabled:
return
if is_enable:
logger.enable()
else:
logger.disable()
dataproperty.set_logger(is_enable)
pytablereader.set_logger(is_enable)
def set_log_level(log_level):
"""
Set logging level of this module. Using
`logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of
`logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if ``log_level`` is ``logbook.NOTSET``.
"""
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
dataproperty.set_log_level(log_level)
pytablereader.set_log_level(log_level)
|
# ... existing code ...
import dataproperty
import logbook
import pytablereader
# ... modified code ...
def set_logger(is_enable):
if is_enable != logger.disabled:
return
if is_enable:
...
logger.disable()
dataproperty.set_logger(is_enable)
pytablereader.set_logger(is_enable)
...
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
...
logger.level = log_level
dataproperty.set_log_level(log_level)
pytablereader.set_log_level(log_level)
# ... rest of the code ...
|
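
The guard added above is an idempotence check: is_enable != logger.disabled is true exactly when the logger is already in the requested state, so the enable/disable cascade only runs on a real transition. A standard-library sketch of the same shape, with logbook's enable()/disable() swapped for the .disabled flag that logging.Logger also exposes; the logger names are illustrative:

import logging

logger = logging.getLogger('simplesqlite-sketch')
logger.disabled = True  # start silenced, as the record does

_dependencies = [logging.getLogger('dataproperty-sketch'),
                 logging.getLogger('pytablereader-sketch')]

def set_logger(is_enable):
    if is_enable != logger.disabled:
        return  # already enabled/disabled as requested
    logger.disabled = not is_enable
    for dep in _dependencies:
        dep.disabled = not is_enable

set_logger(True)   # flips this logger and its dependencies on
set_logger(True)   # no-op: state already matches
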
6e1a211ff1834f8047261d51737afcb0412075b5
|
memleak.py
|
memleak.py
|
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
|
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
if i % 25 == 0:
logging.info(util.nvidia_smi())
|
Add logging of memory use
|
Add logging of memory use
|
Python
|
mit
|
MaestroGraph/sparse-hyper
|
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
+ if i % 25 == 0:
+ logging.info(util.nvidia_smi())
+
+
|
Add logging of memory use
|
## Code Before:
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
## Instruction:
Add logging of memory use
## Code After:
import torch
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch import nn, optim
from torch.nn import Parameter
from tqdm import trange
import util, logging, os, psutil
import hyper
logging.basicConfig(filename='memleak.log',level=logging.INFO)
torch.manual_seed(2)
B = 256
M = 32
IN = OUT = tuple([M] * 8)
W, H = len(IN) + len(OUT), 2048
for i in trange(int(10e7)):
x = torch.randn((B, H, W)) * M
x = x.long().cuda()
x = Variable(x)
x, _ = hyper.flatten_indices(x, IN, OUT)
if i % 25 == 0:
logging.info(util.nvidia_smi())
|
// ... existing code ...
if i % 25 == 0:
logging.info(util.nvidia_smi())
// ... rest of the code ...
|
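
For readers who want to reproduce the leak hunt above without a GPU, the same sample-every-N-iterations idea works on process memory. A sketch assuming psutil is installed (the record already imports it); the list allocation merely stands in for the tensor work:

import logging
import psutil

logging.basicConfig(filename='memleak.log', level=logging.INFO)
LOG_EVERY = 25

for i in range(200):
    buf = [0.0] * 10000  # stand-in for the per-iteration tensor work
    if i % LOG_EVERY == 0:
        rss_mb = psutil.Process().memory_info().rss / 2**20
        logging.info('iter=%d rss=%.1f MiB', i, rss_mb)
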
f25a32dd0180af91277ace186fc878c8baffed65
|
heisen/core/__init__.py
|
heisen/core/__init__.py
|
from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
|
from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
|
Use connection pool for jsonrpc
|
Use connection pool for jsonrpc
|
Python
|
mit
|
HeisenCore/heisen
|
from heisen.config import settings
from heisen.core.log import logger
- from jsonrpclib.request import Connection
+ from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
- servers = {
- 'self': [
- ('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
- ],
- }
+ if settings.CREDENTIALS:
+ username, passowrd = settings.CREDENTIALS[0]
+ else:
+ username = passowrd = None
+
+ servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
- return Connection(servers, 'heisen', settings.APP_NAME)
+ return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
|
Use connection pool for jsonrpc
|
## Code Before:
from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
## Instruction:
Use connection pool for jsonrpc
## Code After:
from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
|
// ... existing code ...
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
// ... modified code ...
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
...
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
// ... rest of the code ...
|
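
The credential fallback above is a reusable two-branch idiom (the record consistently spells the identifier passowrd, so it still runs; the sketch uses the conventional spelling). A standalone version with settings simulated by a plain dict:

def first_credentials(settings):
    creds = settings.get('CREDENTIALS') or []
    # Unpack the first (username, password) pair, or fall back to
    # anonymous access when nothing is configured.
    return creds[0] if creds else (None, None)

print(first_credentials({'CREDENTIALS': [('alice', 's3cr3t')]}))  # ('alice', 's3cr3t')
print(first_credentials({}))                                      # (None, None)
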
f8292dced6aef64950280a33e9980a7998f07104
|
tests/services/shop/base.py
|
tests/services/shop/base.py
|
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
self.db.session.add(article)
self.db.session.commit()
return article
|
from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
return article_service.create_article(
shop_id,
article.item_number,
article.description,
article.price,
article.tax_rate,
article.quantity,
)
|
Create test articles via service
|
Create test articles via service
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
+
+ from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
- self.db.session.add(article)
- self.db.session.commit()
+ return article_service.create_article(
+ shop_id,
+ article.item_number,
+ article.description,
+ article.price,
+ article.tax_rate,
+ article.quantity,
+ )
- return article
-
|
Create test articles via service
|
## Code Before:
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
self.db.session.add(article)
self.db.session.commit()
return article
## Instruction:
Create test articles via service
## Code After:
from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
return article_service.create_article(
shop_id,
article.item_number,
article.description,
article.price,
article.tax_rate,
article.quantity,
)
|
...
from byceps.services.shop.article import service as article_service
...
return article_service.create_article(
shop_id,
article.item_number,
article.description,
article.price,
article.tax_rate,
article.quantity,
)
...
|
90d3f00cd8fea8fab9274069ac06ea461f8e4dfd
|
channels/ooo_b_r/app.py
|
channels/ooo_b_r/app.py
|
from utils import get_url, weighted_random_subreddit
# Group chat https://yal.sh/dvdahoy
t_channel = '-1001065558871'
subreddit = weighted_random_subreddit({
'ANormalDayInRussia': 1.0,
'ANormalDayInAmerica': 0.1,
'ANormalDayInJapan': 0.01
})
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
if what == 'text':
return False
elif what == 'other':
return False
elif what == 'album':
r2t.send_album(url)
return True
elif what in ('gif', 'img'):
return r2t.send_gif_img(what, url, ext, text)
else:
return False
|
from utils import get_url, weighted_random_subreddit
# Group chat https://yal.sh/dvdahoy
t_channel = '-1001065558871'
subreddit = weighted_random_subreddit({
'ANormalDayInRussia': 1.0,
'ANormalDayInAmerica': 0.1,
'ANormalDayInJapan': 0.01
})
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
return r2t.send_gif_img(what, url, ext, text)
|
Send only pics and gifs to OOO_B_R.
|
Send only pics and gifs to OOO_B_R.
|
Python
|
mit
|
nsiregar/reddit2telegram,Fillll/reddit2telegram,nsiregar/reddit2telegram,Fillll/reddit2telegram
|
from utils import get_url, weighted_random_subreddit
# Group chat https://yal.sh/dvdahoy
t_channel = '-1001065558871'
subreddit = weighted_random_subreddit({
'ANormalDayInRussia': 1.0,
'ANormalDayInAmerica': 0.1,
'ANormalDayInJapan': 0.01
})
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
- if what == 'text':
- return False
- elif what == 'other':
- return False
- elif what == 'album':
- r2t.send_album(url)
- return True
- elif what in ('gif', 'img'):
- return r2t.send_gif_img(what, url, ext, text)
+ return r2t.send_gif_img(what, url, ext, text)
- else:
- return False
|
Send only pics and gifs to OOO_B_R.
|
## Code Before:
from utils import get_url, weighted_random_subreddit
# Group chat https://yal.sh/dvdahoy
t_channel = '-1001065558871'
subreddit = weighted_random_subreddit({
'ANormalDayInRussia': 1.0,
'ANormalDayInAmerica': 0.1,
'ANormalDayInJapan': 0.01
})
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
if what == 'text':
return False
elif what == 'other':
return False
elif what == 'album':
r2t.send_album(url)
return True
elif what in ('gif', 'img'):
return r2t.send_gif_img(what, url, ext, text)
else:
return False
## Instruction:
Send only pics and gifs to OOO_B_R.
## Code After:
from utils import get_url, weighted_random_subreddit
# Group chat https://yal.sh/dvdahoy
t_channel = '-1001065558871'
subreddit = weighted_random_subreddit({
'ANormalDayInRussia': 1.0,
'ANormalDayInAmerica': 0.1,
'ANormalDayInJapan': 0.01
})
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
return r2t.send_gif_img(what, url, ext, text)
|
# ... existing code ...
return r2t.send_gif_img(what, url, ext, text)
# ... rest of the code ...
|
c154d79ba13d95f3240efd9eb4725cf9fc16060f
|
forms.py
|
forms.py
|
from flask_wtf import Form
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(Form):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
|
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
|
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
- from flask_wtf import Form
+ from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
- class Login(Form):
+ class Login(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
|
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
|
## Code Before:
from flask_wtf import Form
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(Form):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
## Instruction:
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
## Code After:
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
|
...
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
...
class Login(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
...
|
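
The rename above is mechanical (flask_wtf.Form became flask_wtf.FlaskForm around Flask-WTF 0.13), but a usage sketch shows what the class provides; the route, secret key, and inline template are illustrative, and Flask plus Flask-WTF are assumed installed:

from flask import Flask, render_template_string
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired

app = Flask(__name__)
app.config['SECRET_KEY'] = 'dev-only'  # needed for CSRF tokens

class Login(FlaskForm):
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])

@app.route('/login', methods=['GET', 'POST'])
def login():
    form = Login()
    if form.validate_on_submit():  # POST with valid fields and CSRF token
        return 'welcome %s' % form.username.data
    return render_template_string(
        '<form method="post">{{ form.hidden_tag() }}'
        '{{ form.username() }}{{ form.password() }}'
        '<input type="submit"></form>', form=form)
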
6487c04c85f890a8d767216efac24bf42fb9e387
|
spare5/client.py
|
spare5/client.py
|
import requests
from .resources.batches import Batches
from .resources.jobs import Jobs
DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2'
class Spare5Client(object):
def __init__(self, username, token, api_root=DEFAULT_API_ROOT):
super(Spare5Client, self).__init__()
self.api_root = api_root
self.username = username
self.token = token
self.batches = Batches(self)
def _make_request(self, verb, *args, **kwargs):
kwargs.update({
'auth': (self.username, self.token)
})
response = requests.request(verb, *args, **kwargs)
return response.json()
def _get(self, url, **kwargs):
return self._make_request('get', url, **kwargs)
def _post(self, url, data, **kwargs):
return self._make_request('post', url, data=data, **kwargs)
def _put(self, url, data, **kwargs):
return self._make_request('put', url, data=data, **kwargs)
def _delete(self, url, **kwargs):
return self._make_request('delete', url, **kwargs)
|
import requests
from .resources.batches import Batches
from .resources.jobs import Jobs
DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2'
class Spare5Client(object):
def __init__(self, username, token, api_root=DEFAULT_API_ROOT):
super(Spare5Client, self).__init__()
self.api_root = api_root
self.username = username
self.token = token
self.batches = Batches(self)
def _make_request(self, verb, *args, **kwargs):
kwargs.update({
'auth': (self.username, self.token),
'headers': {
'content-type': 'application/json',
},
})
response = requests.request(verb, *args, **kwargs)
return response.json()
def _get(self, url, **kwargs):
return self._make_request('get', url, **kwargs)
def _post(self, url, data, **kwargs):
return self._make_request('post', url, data=data, **kwargs)
def _put(self, url, data, **kwargs):
return self._make_request('put', url, data=data, **kwargs)
def _delete(self, url, **kwargs):
return self._make_request('delete', url, **kwargs)
|
Update to specify content-type header
|
Update to specify content-type header
|
Python
|
mit
|
roverdotcom/spare5-python
|
import requests
from .resources.batches import Batches
from .resources.jobs import Jobs
DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2'
class Spare5Client(object):
def __init__(self, username, token, api_root=DEFAULT_API_ROOT):
super(Spare5Client, self).__init__()
self.api_root = api_root
self.username = username
self.token = token
self.batches = Batches(self)
def _make_request(self, verb, *args, **kwargs):
kwargs.update({
- 'auth': (self.username, self.token)
+ 'auth': (self.username, self.token),
+ 'headers': {
+ 'content-type': 'application/json',
+ },
})
response = requests.request(verb, *args, **kwargs)
return response.json()
def _get(self, url, **kwargs):
return self._make_request('get', url, **kwargs)
def _post(self, url, data, **kwargs):
return self._make_request('post', url, data=data, **kwargs)
def _put(self, url, data, **kwargs):
return self._make_request('put', url, data=data, **kwargs)
def _delete(self, url, **kwargs):
return self._make_request('delete', url, **kwargs)
|
Update to specify content-type header
|
## Code Before:
import requests
from .resources.batches import Batches
from .resources.jobs import Jobs
DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2'
class Spare5Client(object):
def __init__(self, username, token, api_root=DEFAULT_API_ROOT):
super(Spare5Client, self).__init__()
self.api_root = api_root
self.username = username
self.token = token
self.batches = Batches(self)
def _make_request(self, verb, *args, **kwargs):
kwargs.update({
'auth': (self.username, self.token)
})
response = requests.request(verb, *args, **kwargs)
return response.json()
def _get(self, url, **kwargs):
return self._make_request('get', url, **kwargs)
def _post(self, url, data, **kwargs):
return self._make_request('post', url, data=data, **kwargs)
def _put(self, url, data, **kwargs):
return self._make_request('put', url, data=data, **kwargs)
def _delete(self, url, **kwargs):
return self._make_request('delete', url, **kwargs)
## Instruction:
Update to specify content-type header
## Code After:
import requests
from .resources.batches import Batches
from .resources.jobs import Jobs
DEFAULT_API_ROOT = 'http://app.spare5.com/partner/v2'
class Spare5Client(object):
def __init__(self, username, token, api_root=DEFAULT_API_ROOT):
super(Spare5Client, self).__init__()
self.api_root = api_root
self.username = username
self.token = token
self.batches = Batches(self)
def _make_request(self, verb, *args, **kwargs):
kwargs.update({
'auth': (self.username, self.token),
'headers': {
'content-type': 'application/json',
},
})
response = requests.request(verb, *args, **kwargs)
return response.json()
def _get(self, url, **kwargs):
return self._make_request('get', url, **kwargs)
def _post(self, url, data, **kwargs):
return self._make_request('post', url, data=data, **kwargs)
def _put(self, url, data, **kwargs):
return self._make_request('put', url, data=data, **kwargs)
def _delete(self, url, **kwargs):
return self._make_request('delete', url, **kwargs)
|
// ... existing code ...
kwargs.update({
'auth': (self.username, self.token),
'headers': {
'content-type': 'application/json',
},
})
// ... rest of the code ...
|
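
Two notes on the header change above. First, kwargs.update replaces any headers dict a caller passed in, so per-call headers are silently dropped. Second, requests can set this header itself via the json= keyword. A sketch against httpbin.org, a public echo service:

import requests

payload = {'callback_url': 'https://example.com/hook'}

# Explicit header plus a hand-serialized body, in the record's style:
r1 = requests.post('https://httpbin.org/post',
                   data='{"callback_url": "https://example.com/hook"}',
                   headers={'content-type': 'application/json'},
                   auth=('user', 'token'))

# Equivalent and shorter: requests serializes and sets the header itself.
r2 = requests.post('https://httpbin.org/post', json=payload,
                   auth=('user', 'token'))

print(r2.json()['headers']['Content-Type'])  # application/json
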
5ac84c4e9d8d68b7e89ebf344d2c93a5f7ef4c4c
|
notebooks/galapagos_to_pandas.py
|
notebooks/galapagos_to_pandas.py
|
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits',
out_filename=None):
"""Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file"""
from astropy.io import fits
import pandas as pd
import re
import tempfile
if out_filename is None:
out_filename = re.sub('.fits$', '', in_filename)+'.h5'
data = fits.getdata(in_filename, 1)
with tempfile.NamedTemporaryFile() as tmp:
with pd.get_store(tmp.name, mode='w') as tmpstore:
for n in data.names:
d = data[n]
if len(d.shape) == 1:
new_cols = pd.DataFrame(d, columns=[n])
else:
new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK'])
tmpstore[n] = new_cols
with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store:
# Use format='table' on next line to save as a pytables table
store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1))
return pd.HDFStore(out_filename)
|
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits',
out_filename=None, bands='RUGIZYJHK'):
"""Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file"""
from astropy.io import fits
import pandas as pd
import re
import tempfile
if out_filename is None:
out_filename = re.sub('.fits$', '', in_filename)+'.h5'
data = fits.getdata(in_filename, 1)
with tempfile.NamedTemporaryFile() as tmp:
with pd.get_store(tmp.name, mode='w') as tmpstore:
for n in data.names:
d = data[n]
if len(d.shape) == 1:
new_cols = pd.DataFrame(d, columns=[n])
else:
new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands])
tmpstore[n] = new_cols
with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store:
# Use format='table' on next line to save as a pytables table
store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1))
return pd.HDFStore(out_filename)
|
Allow specification of GALAPAGOS bands
|
Allow specification of GALAPAGOS bands
|
Python
|
mit
|
MegaMorph/megamorph-analysis
|
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits',
- out_filename=None):
+ out_filename=None, bands='RUGIZYJHK'):
"""Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file"""
from astropy.io import fits
import pandas as pd
import re
import tempfile
if out_filename is None:
out_filename = re.sub('.fits$', '', in_filename)+'.h5'
data = fits.getdata(in_filename, 1)
with tempfile.NamedTemporaryFile() as tmp:
with pd.get_store(tmp.name, mode='w') as tmpstore:
for n in data.names:
d = data[n]
if len(d.shape) == 1:
new_cols = pd.DataFrame(d, columns=[n])
else:
- new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK'])
+ new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands])
tmpstore[n] = new_cols
with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store:
# Use format='table' on next line to save as a pytables table
store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1))
return pd.HDFStore(out_filename)
|
Allow specification of GALAPAGOS bands
|
## Code Before:
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits',
out_filename=None):
"""Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file"""
from astropy.io import fits
import pandas as pd
import re
import tempfile
if out_filename is None:
out_filename = re.sub('.fits$', '', in_filename)+'.h5'
data = fits.getdata(in_filename, 1)
with tempfile.NamedTemporaryFile() as tmp:
with pd.get_store(tmp.name, mode='w') as tmpstore:
for n in data.names:
d = data[n]
if len(d.shape) == 1:
new_cols = pd.DataFrame(d, columns=[n])
else:
new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK'])
tmpstore[n] = new_cols
with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store:
# Use format='table' on next line to save as a pytables table
store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1))
return pd.HDFStore(out_filename)
## Instruction:
Allow specification of GALAPAGOS bands
## Code After:
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits',
out_filename=None, bands='RUGIZYJHK'):
"""Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file"""
from astropy.io import fits
import pandas as pd
import re
import tempfile
if out_filename is None:
out_filename = re.sub('.fits$', '', in_filename)+'.h5'
data = fits.getdata(in_filename, 1)
with tempfile.NamedTemporaryFile() as tmp:
with pd.get_store(tmp.name, mode='w') as tmpstore:
for n in data.names:
d = data[n]
if len(d.shape) == 1:
new_cols = pd.DataFrame(d, columns=[n])
else:
new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands])
tmpstore[n] = new_cols
with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store:
# Use format='table' on next line to save as a pytables table
store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1))
return pd.HDFStore(out_filename)
|
...
def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits',
out_filename=None, bands='RUGIZYJHK'):
"""Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file"""
...
else:
new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands])
tmpstore[n] = new_cols
...
|
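A minimal sketch of calling the updated function with a custom band string. The input path here is a hypothetical file and the band order is just an example; the function above must be in scope, and note it relies on the long-removed pd.get_store API, so it targets the pandas version of its era:

# Illustrative only: 'subset.fits' is a placeholder, not a shipped file.
store = galapagos_to_pandas('subset.fits', bands='ugrizYJHK')
df = store['data']   # multi-band columns carry per-band suffixes, e.g. 'MAG_u'
store.close()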
edcde8ed3562e19b7bde43632965c2902a8e7f25
|
troposphere/sns.py
|
troposphere/sns.py
|
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
|
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere,ikben/troposphere,ikben/troposphere
|
- from . import AWSObject, AWSProperty
+ from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
+ 'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
## Code Before:
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
## Instruction:
Add Tags to SNS::Topic per 2019-11-31 changes
## Code After:
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
...
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
...
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
...
|
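With the new property in place, tagging a topic looks like the sketch below. The logical ID and tag values are invented for illustration; the Template/Tags API is standard troposphere:

from troposphere import Tags, Template
from troposphere.sns import Topic

t = Template()
t.add_resource(Topic(
    'AlertsTopic',                      # logical ID, example only
    TopicName='alerts',
    Tags=Tags(Environment='prod', Team='platform'),
))
print(t.to_json())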
e64d13486fe20c44dde0dea6a6fed5a95eddbbd1
|
awx/main/notifications/email_backend.py
|
awx/main/notifications/email_backend.py
|
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
|
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
|
Remove Tower reference from email backend
|
Remove Tower reference from email backend
|
Python
|
apache-2.0
|
snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx
|
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
- body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
+ body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
|
Remove Tower reference from email backend
|
## Code Before:
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
## Instruction:
Remove Tower reference from email backend
## Code After:
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
|
# ... existing code ...
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
# ... rest of the code ...
|
be40174929193085ccd38683e64944fb4aabb26b
|
serial_reader.py
|
serial_reader.py
|
from argparse import ArgumentParser
import sys
import serial
def run(device, baud):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
if line:
sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
args = parser.parse_args()
run(args.device, args.baud)
|
from argparse import ArgumentParser
import sys
import serial
from datetime import datetime
def run(device, baud, prefix=None):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
if not line:
continue
if prefix:
line = prefix() + line
sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
parser.add_argument('-t', '--timestamp', dest='timestamp', action='store_true',
help="Add timestamp to start of each line")
args = parser.parse_args()
prefix = None
if args.timestamp:
prefix = lambda: datetime.now().strftime("[%H:%M:%S.%f] ")
run(args.device, args.baud, prefix)
|
Add option to timestamp each line from serial
|
Add option to timestamp each line from serial
|
Python
|
unlicense
|
recursify/serial-debug-tool
|
from argparse import ArgumentParser
import sys
import serial
+ from datetime import datetime
- def run(device, baud):
+ def run(device, baud, prefix=None):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
- if line:
+ if not line:
+ continue
+ if prefix:
+ line = prefix() + line
- sys.stdout.write(line)
+ sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
+ parser.add_argument('-t', '--timestamp', dest='timestamp', action='store_true',
+ help="Add timestamp to start of each line")
args = parser.parse_args()
+ prefix = None
+ if args.timestamp:
+ prefix = lambda: datetime.now().strftime("[%H:%M:%S.%f] ")
- run(args.device, args.baud)
+ run(args.device, args.baud, prefix)
|
Add option to timestamp each line from serial
|
## Code Before:
from argparse import ArgumentParser
import sys
import serial
def run(device, baud):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
if line:
sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
args = parser.parse_args()
run(args.device, args.baud)
## Instruction:
Add option to timestamp each line from serial
## Code After:
from argparse import ArgumentParser
import sys
import serial
from datetime import datetime
def run(device, baud, prefix=None):
with serial.Serial(device, baud, timeout=0.1) as ser:
while True:
line = ser.readline()
if not line:
continue
if prefix:
line = prefix() + line
sys.stdout.write(line)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('device',
help='serial device, typically /dev/tty.usbserial-*')
parser.add_argument('--baud', dest='baud', type=int, default=74880)
parser.add_argument('-t', '--timestamp', dest='timestamp', action='store_true',
help="Add timestamp to start of each line")
args = parser.parse_args()
prefix = None
if args.timestamp:
prefix = lambda: datetime.now().strftime("[%H:%M:%S.%f] ")
run(args.device, args.baud, prefix)
|
// ... existing code ...
import serial
from datetime import datetime
def run(device, baud, prefix=None):
with serial.Serial(device, baud, timeout=0.1) as ser:
// ... modified code ...
line = ser.readline()
if not line:
continue
if prefix:
line = prefix() + line
sys.stdout.write(line)
...
parser.add_argument('--baud', dest='baud', type=int, default=74880)
parser.add_argument('-t', '--timestamp', dest='timestamp', action='store_true',
help="Add timestamp to start of each line")
args = parser.parse_args()
prefix = None
if args.timestamp:
prefix = lambda: datetime.now().strftime("[%H:%M:%S.%f] ")
run(args.device, args.baud, prefix)
// ... rest of the code ...
|
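The prefix argument is just a zero-argument callable returning a string, so the behaviour can be exercised without any serial hardware. A minimal sketch of the same decision the run() loop makes (the line content is invented):

from datetime import datetime

def stamp():
    return datetime.now().strftime("[%H:%M:%S.%f] ")

def emit(line, prefix=None):
    # Mirrors the loop body above: prepend only when a prefix is given.
    return (prefix() + line) if prefix else line

print(emit("boot ok\n", prefix=stamp), end="")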
f75d06702274257215229b83c4ff74de3dc72463
|
nnpy/tests.py
|
nnpy/tests.py
|
from __future__ import print_function
import nnpy, unittest
class Tests(unittest.TestCase):
def test_basic(self):
pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
pub.bind('inproc://foo')
self.assertEqual(pub.getsockopt(nnpy.SOL_SOCKET, nnpy.DOMAIN), 1)
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
sub.connect('inproc://foo')
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
pub.send('FLUB')
poller = nnpy.PollSet((sub, nnpy.POLLIN))
self.assertEqual(poller.poll(), 1)
self.assertEqual(sub.recv(), 'FLUB')
self.assertEqual(pub.get_statistic(nnpy.STAT_MESSAGES_SENT), 1)
def suite():
return unittest.makeSuite(Tests)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
from __future__ import print_function
import nnpy, unittest
class Tests(unittest.TestCase):
def test_basic(self):
pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
pub.setsockopt(nnpy.SOL_SOCKET, nnpy.IPV4ONLY, 0)
pub.bind('inproc://foo')
self.assertEqual(pub.getsockopt(nnpy.SOL_SOCKET, nnpy.DOMAIN), 1)
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
sub_conn = sub.connect('inproc://foo')
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
pub.send('FLUB')
poller = nnpy.PollSet((sub, nnpy.POLLIN))
self.assertEqual(poller.poll(), 1)
self.assertEqual(sub.recv(), 'FLUB')
self.assertEqual(pub.get_statistic(nnpy.STAT_MESSAGES_SENT), 1)
pub.close()
sub.shutdown(sub_conn)
def suite():
return unittest.makeSuite(Tests)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Add some more test coverage
|
Add some more test coverage
|
Python
|
mit
|
nanomsg/nnpy
|
from __future__ import print_function
import nnpy, unittest
class Tests(unittest.TestCase):
def test_basic(self):
pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
+ pub.setsockopt(nnpy.SOL_SOCKET, nnpy.IPV4ONLY, 0)
pub.bind('inproc://foo')
self.assertEqual(pub.getsockopt(nnpy.SOL_SOCKET, nnpy.DOMAIN), 1)
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
- sub.connect('inproc://foo')
+ sub_conn = sub.connect('inproc://foo')
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
pub.send('FLUB')
poller = nnpy.PollSet((sub, nnpy.POLLIN))
self.assertEqual(poller.poll(), 1)
self.assertEqual(sub.recv(), 'FLUB')
self.assertEqual(pub.get_statistic(nnpy.STAT_MESSAGES_SENT), 1)
+ pub.close()
+ sub.shutdown(sub_conn)
def suite():
return unittest.makeSuite(Tests)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Add some more test coverage
|
## Code Before:
from __future__ import print_function
import nnpy, unittest
class Tests(unittest.TestCase):
def test_basic(self):
pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
pub.bind('inproc://foo')
self.assertEqual(pub.getsockopt(nnpy.SOL_SOCKET, nnpy.DOMAIN), 1)
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
sub.connect('inproc://foo')
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
pub.send('FLUB')
poller = nnpy.PollSet((sub, nnpy.POLLIN))
self.assertEqual(poller.poll(), 1)
self.assertEqual(sub.recv(), 'FLUB')
self.assertEqual(pub.get_statistic(nnpy.STAT_MESSAGES_SENT), 1)
def suite():
return unittest.makeSuite(Tests)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
## Instruction:
Add some more test coverage
## Code After:
from __future__ import print_function
import nnpy, unittest
class Tests(unittest.TestCase):
def test_basic(self):
pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
pub.setsockopt(nnpy.SOL_SOCKET, nnpy.IPV4ONLY, 0)
pub.bind('inproc://foo')
self.assertEqual(pub.getsockopt(nnpy.SOL_SOCKET, nnpy.DOMAIN), 1)
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
sub_conn = sub.connect('inproc://foo')
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
pub.send('FLUB')
poller = nnpy.PollSet((sub, nnpy.POLLIN))
self.assertEqual(poller.poll(), 1)
self.assertEqual(sub.recv(), 'FLUB')
self.assertEqual(pub.get_statistic(nnpy.STAT_MESSAGES_SENT), 1)
pub.close()
sub.shutdown(sub_conn)
def suite():
return unittest.makeSuite(Tests)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
# ... existing code ...
pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
pub.setsockopt(nnpy.SOL_SOCKET, nnpy.IPV4ONLY, 0)
pub.bind('inproc://foo')
# ... modified code ...
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
sub_conn = sub.connect('inproc://foo')
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
...
self.assertEqual(pub.get_statistic(nnpy.STAT_MESSAGES_SENT), 1)
pub.close()
sub.shutdown(sub_conn)
# ... rest of the code ...
|
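The teardown added above follows nanomsg's endpoint lifecycle: connect() returns an endpoint id that shutdown() detaches, while close() releases the whole socket. A condensed sketch of the same flow, assuming nnpy and nanomsg are installed:

import nnpy

pub = nnpy.Socket(nnpy.AF_SP, nnpy.PUB)
pub.bind('inproc://demo')
sub = nnpy.Socket(nnpy.AF_SP, nnpy.SUB)
endpoint = sub.connect('inproc://demo')     # keep the id for shutdown()
sub.setsockopt(nnpy.SUB, nnpy.SUB_SUBSCRIBE, '')
pub.send('ping')
print(sub.recv())       # real code should poll first, as the test above does
sub.shutdown(endpoint)  # detach just this endpoint
pub.close()             # tear down the whole socket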
d03250e1af17a40be3b9aa70fef67e50ab556a87
|
numba2/compiler/layout.py
|
numba2/compiler/layout.py
|
from __future__ import print_function, division, absolute_import
from numba2 import conversion
from pykit import types as ptypes
from pykit.utils import ctypes_support
#===------------------------------------------------------------------===
# Types
#===------------------------------------------------------------------===
def representation_type(ty):
"""
Get the low-level representation type for a high-level (user-defined) type.
Returns
=======
The pykit type for the object layout.
"""
from numba2.lib import vectorobject
from numba2.lib import arrayobject
from numba2.runtime.obj import pointerobject
if ty.impl == pointerobject.Pointer:
(base,) = ty.parameters
return ptypes.Pointer(representation_type(base))
if ty.impl == vectorobject.Vector:
base, count = ty.parameters
return ptypes.Vector(representation_type(base), count)
if ty.impl == arrayobject.Array:
base, count = ty.parameters
return ptypes.Array(representation_type(base), count)
cty = conversion.ctype(ty)
result_type = ctypes_support.from_ctypes_type(cty)
if result_type.is_struct:
result_type = ptypes.Pointer(result_type)
return result_type
|
from __future__ import print_function, division, absolute_import
from numba2 import conversion
from pykit import types as ptypes
from pykit.utils import ctypes_support
#===------------------------------------------------------------------===
# Types
#===------------------------------------------------------------------===
def representation_type(ty):
"""
Get the low-level representation type for a high-level (user-defined) type.
Returns
=======
The pykit type for the object layout.
"""
# NOTE: special cases should be kept to an absolute minimum here. They
# should probably be introduced only if ctypes cannot represent the
# type
from numba2.lib import vectorobject
if ty.impl == vectorobject.Vector:
# Ctypes does not support vectors
base, count = ty.parameters
return ptypes.Vector(representation_type(base), count)
cty = conversion.ctype(ty)
result_type = ctypes_support.from_ctypes_type(cty)
if result_type.is_struct:
result_type = ptypes.Pointer(result_type)
return result_type
|
Remove some object representation clobbering code
|
Remove some object representation clobbering code
|
Python
|
bsd-2-clause
|
flypy/flypy,flypy/flypy
|
from __future__ import print_function, division, absolute_import
from numba2 import conversion
from pykit import types as ptypes
from pykit.utils import ctypes_support
#===------------------------------------------------------------------===
# Types
#===------------------------------------------------------------------===
def representation_type(ty):
"""
Get the low-level representation type for a high-level (user-defined) type.
Returns
=======
The pykit type for the object layout.
"""
+ # NOTE: special cases should be kept to an absolute minimum here. They
+ # should probably be introduced only if ctypes cannot represent the
+ # type
+
from numba2.lib import vectorobject
- from numba2.lib import arrayobject
- from numba2.runtime.obj import pointerobject
- if ty.impl == pointerobject.Pointer:
- (base,) = ty.parameters
- return ptypes.Pointer(representation_type(base))
if ty.impl == vectorobject.Vector:
+ # Ctypes does not support vectors
base, count = ty.parameters
return ptypes.Vector(representation_type(base), count)
- if ty.impl == arrayobject.Array:
- base, count = ty.parameters
- return ptypes.Array(representation_type(base), count)
cty = conversion.ctype(ty)
result_type = ctypes_support.from_ctypes_type(cty)
if result_type.is_struct:
result_type = ptypes.Pointer(result_type)
return result_type
|
Remove some object representation clobbering code
|
## Code Before:
from __future__ import print_function, division, absolute_import
from numba2 import conversion
from pykit import types as ptypes
from pykit.utils import ctypes_support
#===------------------------------------------------------------------===
# Types
#===------------------------------------------------------------------===
def representation_type(ty):
"""
Get the low-level representation type for a high-level (user-defined) type.
Returns
=======
The pykit type for the object layout.
"""
from numba2.lib import vectorobject
from numba2.lib import arrayobject
from numba2.runtime.obj import pointerobject
if ty.impl == pointerobject.Pointer:
(base,) = ty.parameters
return ptypes.Pointer(representation_type(base))
if ty.impl == vectorobject.Vector:
base, count = ty.parameters
return ptypes.Vector(representation_type(base), count)
if ty.impl == arrayobject.Array:
base, count = ty.parameters
return ptypes.Array(representation_type(base), count)
cty = conversion.ctype(ty)
result_type = ctypes_support.from_ctypes_type(cty)
if result_type.is_struct:
result_type = ptypes.Pointer(result_type)
return result_type
## Instruction:
Remove some object representation clobbering code
## Code After:
from __future__ import print_function, division, absolute_import
from numba2 import conversion
from pykit import types as ptypes
from pykit.utils import ctypes_support
#===------------------------------------------------------------------===
# Types
#===------------------------------------------------------------------===
def representation_type(ty):
"""
Get the low-level representation type for a high-level (user-defined) type.
Returns
=======
The pykit type for the object layout.
"""
# NOTE: special cases should be kept to an absolute minimum here. They
# should probably be introduced only if ctypes cannot represent the
# type
from numba2.lib import vectorobject
if ty.impl == vectorobject.Vector:
# Ctypes does not support vectors
base, count = ty.parameters
return ptypes.Vector(representation_type(base), count)
cty = conversion.ctype(ty)
result_type = ctypes_support.from_ctypes_type(cty)
if result_type.is_struct:
result_type = ptypes.Pointer(result_type)
return result_type
|
// ... existing code ...
"""
# NOTE: special cases should be kept to an absolute minimum here. They
# should probably be introduced only if ctypes cannot represent the
# type
from numba2.lib import vectorobject
if ty.impl == vectorobject.Vector:
# Ctypes does not support vectors
base, count = ty.parameters
// ... modified code ...
return ptypes.Vector(representation_type(base), count)
// ... rest of the code ...
|
44f603cd947f63101cf6b7eb8e49b5210cfa4f6f
|
wry/__init__.py
|
wry/__init__.py
|
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
|
Add version. Note this will cause the file to be modified in your
working copy. This change is gitignored
|
Python
|
apache-2.0
|
ocadotechnology/wry
|
+ import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
|
## Code Before:
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
## Instruction:
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
## Code After:
import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
...
import version
import AMTDevice
...
|
fb0b129216bd98a90cdee623157df5c7e4a742fb
|
blinkenlights/blinkenlights.py
|
blinkenlights/blinkenlights.py
|
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
loop.close()
|
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
os.remove('/tmp/coord.socket')
loop.close()
|
Clean up socket file on exiting
|
Clean up socket file on exiting
Change-Id: I34391c64408b5a35386913bd7be01d81feed61b6
|
Python
|
mit
|
fayoh/KSP-Control
|
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
+ os.remove('/tmp/coord.socket')
loop.close()
|
Clean up socket file on exiting
|
## Code Before:
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
loop.close()
## Instruction:
Clean up socket file on exiting
## Code After:
import asyncio, signal, os
from blink import blink
import ipc.coordinator
loop = asyncio.get_event_loop()
def my_interrupt_handler():
print('Stopping')
for task in asyncio.Task.all_tasks():
task.cancel()
loop.stop()
loop.add_signal_handler(signal.SIGINT, my_interrupt_handler)
blink.start()
ipc.coordinator.start(loop)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
except asyncio.CancelledError:
print('Tasks has been canceled')
finally:
ipc.coordinator.stop()
os.remove('/tmp/coord.socket')
loop.close()
|
...
ipc.coordinator.stop()
os.remove('/tmp/coord.socket')
loop.close()
...
|
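Unix domain sockets persist as filesystem entries, which is why the teardown has to remove the file. A slightly hardened sketch of the same idea — the path is the record's, but the existence check is an extra guard, not part of the original:

import os

SOCKET_PATH = '/tmp/coord.socket'

def cleanup_socket(path=SOCKET_PATH):
    # Guard so a failed startup (no socket ever created) does not raise here.
    if os.path.exists(path):
        os.remove(path)

cleanup_socket()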
18374ff4e3906f704276bb0a7b5a5feae50875a2
|
aspy/yaml/__init__.py
|
aspy/yaml/__init__.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
import yaml
# Adapted from http://stackoverflow.com/a/21912744/812183
class OrderedLoader(yaml.loader.Loader):
pass
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
)
class OrderedDumper(yaml.dumper.SafeDumper):
pass
OrderedDumper.add_representer(
OrderedDict,
lambda dumper, data: dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items(),
),
)
def ordered_load(stream):
"""yaml.load which respects order for dictionaries in the yaml file.
:param stream: string or streamlike object.
"""
return yaml.load(stream, Loader=OrderedLoader)
def ordered_dump(obj, **kwargs):
"""yaml.dump which respects order for dictionaries in the yaml object.
:param obj: Yaml dumpable object
"""
return yaml.dump(obj, Dumper=OrderedDumper, **kwargs)
|
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
import yaml
# Adapted from http://stackoverflow.com/a/21912744/812183
class OrderedLoader(getattr(yaml, 'CSafeLoader', yaml.SafeLoader)):
pass
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
)
class OrderedDumper(getattr(yaml, 'CSafeDumper', yaml.SafeDumper)):
pass
OrderedDumper.add_representer(
OrderedDict,
lambda dumper, data: dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items(),
),
)
def ordered_load(stream):
"""yaml.load which respects order for dictionaries in the yaml file.
:param stream: string or streamlike object.
"""
return yaml.load(stream, Loader=OrderedLoader)
def ordered_dump(obj, **kwargs):
"""yaml.dump which respects order for dictionaries in the yaml object.
:param obj: Yaml dumpable object
"""
return yaml.dump(obj, Dumper=OrderedDumper, **kwargs)
|
Use the C Loader/Dumper when available
|
Use the C Loader/Dumper when available
|
Python
|
mit
|
asottile/aspy.yaml
|
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
import yaml
# Adapted from http://stackoverflow.com/a/21912744/812183
- class OrderedLoader(yaml.loader.Loader):
+ class OrderedLoader(getattr(yaml, 'CSafeLoader', yaml.SafeLoader)):
pass
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
)
- class OrderedDumper(yaml.dumper.SafeDumper):
+ class OrderedDumper(getattr(yaml, 'CSafeDumper', yaml.SafeDumper)):
pass
OrderedDumper.add_representer(
OrderedDict,
lambda dumper, data: dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items(),
),
)
def ordered_load(stream):
"""yaml.load which respects order for dictionaries in the yaml file.
:param stream: string or streamlike object.
"""
return yaml.load(stream, Loader=OrderedLoader)
def ordered_dump(obj, **kwargs):
"""yaml.dump which respects order for dictionaries in the yaml object.
:param obj: Yaml dumpable object
"""
return yaml.dump(obj, Dumper=OrderedDumper, **kwargs)
|
Use the C Loader/Dumper when available
|
## Code Before:
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
import yaml
# Adapted from http://stackoverflow.com/a/21912744/812183
class OrderedLoader(yaml.loader.Loader):
pass
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
)
class OrderedDumper(yaml.dumper.SafeDumper):
pass
OrderedDumper.add_representer(
OrderedDict,
lambda dumper, data: dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items(),
),
)
def ordered_load(stream):
"""yaml.load which respects order for dictionaries in the yaml file.
:param stream: string or streamlike object.
"""
return yaml.load(stream, Loader=OrderedLoader)
def ordered_dump(obj, **kwargs):
"""yaml.dump which respects order for dictionaries in the yaml object.
:param obj: Yaml dumpable object
"""
return yaml.dump(obj, Dumper=OrderedDumper, **kwargs)
## Instruction:
Use the C Loader/Dumper when available
## Code After:
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
import yaml
# Adapted from http://stackoverflow.com/a/21912744/812183
class OrderedLoader(getattr(yaml, 'CSafeLoader', yaml.SafeLoader)):
pass
OrderedLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
lambda loader, node: OrderedDict(loader.construct_pairs(node)),
)
class OrderedDumper(getattr(yaml, 'CSafeDumper', yaml.SafeDumper)):
pass
OrderedDumper.add_representer(
OrderedDict,
lambda dumper, data: dumper.represent_mapping(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
data.items(),
),
)
def ordered_load(stream):
"""yaml.load which respects order for dictionaries in the yaml file.
:param stream: string or streamlike object.
"""
return yaml.load(stream, Loader=OrderedLoader)
def ordered_dump(obj, **kwargs):
"""yaml.dump which respects order for dictionaries in the yaml object.
:param obj: Yaml dumpable object
"""
return yaml.dump(obj, Dumper=OrderedDumper, **kwargs)
|
# ... existing code ...
class OrderedLoader(getattr(yaml, 'CSafeLoader', yaml.SafeLoader)):
pass
# ... modified code ...
class OrderedDumper(getattr(yaml, 'CSafeDumper', yaml.SafeDumper)):
pass
# ... rest of the code ...
|
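The getattr fallback quietly prefers PyYAML's libyaml-backed classes when the package was compiled against libyaml, and degrades to the pure-Python safe variants otherwise. The same pattern can be checked in isolation:

import yaml

# Prefer the C-accelerated safe classes when available, else pure Python.
LoaderCls = getattr(yaml, 'CSafeLoader', yaml.SafeLoader)
DumperCls = getattr(yaml, 'CSafeDumper', yaml.SafeDumper)

doc = yaml.load('a: 1\nb: 2\n', Loader=LoaderCls)
print(doc)                                   # {'a': 1, 'b': 2}
print(yaml.dump(doc, Dumper=DumperCls), end='')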
eea1ba0273b8e5362f6b27854e29e6053555fb2a
|
gittip/cli.py
|
gittip/cli.py
|
from gittip import wireup
def payday():
db = wireup.db()
wireup.billing()
wireup.nanswers()
# Lazily import the billing module.
# =================================
# This dodges a problem where db in billing is None if we import it from
# gittip before calling wireup.billing.
from gittip.billing.payday import Payday
try:
Payday(db).run()
except KeyboardInterrupt:
pass
except:
import aspen
import traceback
aspen.log(traceback.format_exc())
|
import os
from gittip import wireup
def payday():
# Wire things up.
# ===============
# Manually override max db connections so that we only have one connection.
# Our db access is serialized right now anyway, and with only one
# connection it's easier to trust changes to statement_timeout. The point
# here is that we want to turn off statement_timeout for payday.
os.environ['DATABASE_MAXCONN'] = '1'
db = wireup.db()
db.run("SET statement_timeout = 0")
wireup.billing()
wireup.nanswers()
# Lazily import the billing module.
# =================================
# This dodges a problem where db in billing is None if we import it from
# gittip before calling wireup.billing.
from gittip.billing.payday import Payday
try:
Payday(db).run()
except KeyboardInterrupt:
pass
except:
import aspen
import traceback
aspen.log(traceback.format_exc())
|
Configure payday for no db timeout
|
Configure payday for no db timeout
|
Python
|
mit
|
mccolgst/www.gittip.com,studio666/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,studio666/gratipay.com
|
+ import os
from gittip import wireup
def payday():
+
+ # Wire things up.
+ # ===============
+ # Manually override max db connections so that we only have one connection.
+ # Our db access is serialized right now anyway, and with only one
+ # connection it's easier to trust changes to statement_timeout. The point
+ # here is that we want to turn off statement_timeout for payday.
+
+ os.environ['DATABASE_MAXCONN'] = '1'
db = wireup.db()
+ db.run("SET statement_timeout = 0")
+
wireup.billing()
wireup.nanswers()
# Lazily import the billing module.
# =================================
# This dodges a problem where db in billing is None if we import it from
# gittip before calling wireup.billing.
from gittip.billing.payday import Payday
try:
Payday(db).run()
except KeyboardInterrupt:
pass
except:
import aspen
import traceback
aspen.log(traceback.format_exc())
|
Configure payday for no db timeout
|
## Code Before:
from gittip import wireup
def payday():
db = wireup.db()
wireup.billing()
wireup.nanswers()
# Lazily import the billing module.
# =================================
# This dodges a problem where db in billing is None if we import it from
# gittip before calling wireup.billing.
from gittip.billing.payday import Payday
try:
Payday(db).run()
except KeyboardInterrupt:
pass
except:
import aspen
import traceback
aspen.log(traceback.format_exc())
## Instruction:
Configure payday for no db timeout
## Code After:
import os
from gittip import wireup
def payday():
# Wire things up.
# ===============
# Manually override max db connections so that we only have one connection.
# Our db access is serialized right now anyway, and with only one
# connection it's easier to trust changes to statement_timeout. The point
# here is that we want to turn off statement_timeout for payday.
os.environ['DATABASE_MAXCONN'] = '1'
db = wireup.db()
db.run("SET statement_timeout = 0")
wireup.billing()
wireup.nanswers()
# Lazily import the billing module.
# =================================
# This dodges a problem where db in billing is None if we import it from
# gittip before calling wireup.billing.
from gittip.billing.payday import Payday
try:
Payday(db).run()
except KeyboardInterrupt:
pass
except:
import aspen
import traceback
aspen.log(traceback.format_exc())
|
// ... existing code ...
import os
from gittip import wireup
// ... modified code ...
def payday():
# Wire things up.
# ===============
# Manually override max db connections so that we only have one connection.
# Our db access is serialized right now anyway, and with only one
# connection it's easier to trust changes to statement_timeout. The point
# here is that we want to turn off statement_timeout for payday.
os.environ['DATABASE_MAXCONN'] = '1'
db = wireup.db()
db.run("SET statement_timeout = 0")
wireup.billing()
// ... rest of the code ...
|
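The ordering is the whole point: the environment override has to land before wireup.db() builds its pool, and the statement_timeout reset then holds for that single connection. A generic sketch of the pattern — wireup is project code, so the factory below is a hypothetical stand-in:

import os

# Must happen before the pool is created, or the old value is read.
os.environ['DATABASE_MAXCONN'] = '1'

def make_pool():                  # stand-in for wireup.db()
    return {'maxconn': int(os.environ.get('DATABASE_MAXCONN', '10'))}

print(make_pool())                # {'maxconn': 1}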
31b8ecda3d6b34428180b45e49489ebefc8a57e3
|
tests/test_util.py
|
tests/test_util.py
|
from unittest import TestCase
from pytest import deprecated_call
from w3lib.util import str_to_unicode, to_native_str, unicode_to_str
class StrToUnicodeTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
str_to_unicode('')
class ToNativeStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
to_native_str('')
class UnicodeToStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
unicode_to_str('')
|
from unittest import TestCase
from pytest import deprecated_call, raises
from w3lib.util import (
str_to_unicode,
to_bytes,
to_native_str,
to_unicode,
unicode_to_str,
)
class StrToUnicodeTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
str_to_unicode('')
class ToBytesTestCase(TestCase):
def test_type_error(self):
with raises(TypeError):
to_bytes(True)
class ToNativeStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
to_native_str('')
class ToUnicodeTestCase(TestCase):
def test_type_error(self):
with raises(TypeError):
to_unicode(True)
class UnicodeToStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
unicode_to_str('')
|
Test the TypeError of to_bytes() and to_unicode()
|
Test the TypeError of to_bytes() and to_unicode()
|
Python
|
bsd-3-clause
|
scrapy/w3lib
|
from unittest import TestCase
- from pytest import deprecated_call
+ from pytest import deprecated_call, raises
- from w3lib.util import str_to_unicode, to_native_str, unicode_to_str
+ from w3lib.util import (
+ str_to_unicode,
+ to_bytes,
+ to_native_str,
+ to_unicode,
+ unicode_to_str,
+ )
class StrToUnicodeTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
str_to_unicode('')
+ class ToBytesTestCase(TestCase):
+
+ def test_type_error(self):
+ with raises(TypeError):
+ to_bytes(True)
+
+
class ToNativeStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
to_native_str('')
+
+
+ class ToUnicodeTestCase(TestCase):
+
+ def test_type_error(self):
+ with raises(TypeError):
+ to_unicode(True)
class UnicodeToStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
unicode_to_str('')
|
Test the TypeError of to_bytes() and to_unicode()
|
## Code Before:
from unittest import TestCase
from pytest import deprecated_call
from w3lib.util import str_to_unicode, to_native_str, unicode_to_str
class StrToUnicodeTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
str_to_unicode('')
class ToNativeStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
to_native_str('')
class UnicodeToStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
unicode_to_str('')
## Instruction:
Test the TypeError of to_bytes() and to_unicode()
## Code After:
from unittest import TestCase
from pytest import deprecated_call, raises
from w3lib.util import (
str_to_unicode,
to_bytes,
to_native_str,
to_unicode,
unicode_to_str,
)
class StrToUnicodeTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
str_to_unicode('')
class ToBytesTestCase(TestCase):
def test_type_error(self):
with raises(TypeError):
to_bytes(True)
class ToNativeStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
to_native_str('')
class ToUnicodeTestCase(TestCase):
def test_type_error(self):
with raises(TypeError):
to_unicode(True)
class UnicodeToStrTestCase(TestCase):
def test_deprecation(self):
with deprecated_call():
unicode_to_str('')
|
# ... existing code ...
from pytest import deprecated_call, raises
from w3lib.util import (
str_to_unicode,
to_bytes,
to_native_str,
to_unicode,
unicode_to_str,
)
# ... modified code ...
class ToBytesTestCase(TestCase):
def test_type_error(self):
with raises(TypeError):
to_bytes(True)
class ToNativeStrTestCase(TestCase):
...
class ToUnicodeTestCase(TestCase):
def test_type_error(self):
with raises(TypeError):
to_unicode(True)
class UnicodeToStrTestCase(TestCase):
# ... rest of the code ...
|
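The behaviour under test is that w3lib's converters accept only text or bytes. A short demo, assuming w3lib is installed (the default encoding is UTF-8):

from w3lib.util import to_bytes, to_unicode

print(to_bytes('café'))              # b'caf\xc3\xa9'
print(to_unicode(b'caf\xc3\xa9'))    # 'café'

try:
    to_bytes(True)                   # neither str nor bytes
except TypeError as exc:
    print('rejected:', exc)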
c347e6e763b79a9c4af6d7776093ce9ed711c43d
|
monkeys/release.py
|
monkeys/release.py
|
from invoke import task, run
@task
def makerelease(ctx, version, local_only=False):
if not version:
raise Exception("You must specify a version!")
# FoodTruck assets.
print("Update node modules")
run("npm install")
print("Generating Wikked assets")
run("gulp")
if not local_only:
# Tag in Mercurial, which will then be used for PyPi version.
run("hg tag %s" % version)
# PyPi upload.
run("python setup.py sdist upload")
else:
print("Would tag repo with %s..." % version)
print("Would upload to PyPi...")
|
from invoke import task, run
@task
def makerelease(ctx, version, local_only=False):
if not version:
raise Exception("You must specify a version!")
# FoodTruck assets.
print("Update node modules")
run("npm install")
print("Generating Wikked assets")
run("gulp")
if not local_only:
# Tag in Mercurial, which will then be used for PyPi version.
run("hg tag %s" % version)
# PyPi upload.
run("python setup.py sdist bdist_wheel")
run("twine upload dist/Wikked-%s.tar.gz" % version)
else:
print("Would tag repo with %s..." % version)
print("Would upload to PyPi...")
|
Use `twine` to deploy Wikked to Pypi.
|
cm: Use `twine` to deploy Wikked to Pypi.
|
Python
|
apache-2.0
|
ludovicchabant/Wikked,ludovicchabant/Wikked,ludovicchabant/Wikked
|
from invoke import task, run
@task
def makerelease(ctx, version, local_only=False):
if not version:
raise Exception("You must specify a version!")
# FoodTruck assets.
print("Update node modules")
run("npm install")
print("Generating Wikked assets")
run("gulp")
if not local_only:
# Tag in Mercurial, which will then be used for PyPi version.
run("hg tag %s" % version)
# PyPi upload.
- run("python setup.py sdist upload")
+ run("python setup.py sdist bdist_wheel")
+ run("twine upload dist/Wikked-%s.tar.gz" % version)
else:
print("Would tag repo with %s..." % version)
print("Would upload to PyPi...")
|
Use `twine` to deploy Wikked to Pypi.
|
## Code Before:
from invoke import task, run
@task
def makerelease(ctx, version, local_only=False):
if not version:
raise Exception("You must specify a version!")
# FoodTruck assets.
print("Update node modules")
run("npm install")
print("Generating Wikked assets")
run("gulp")
if not local_only:
# Tag in Mercurial, which will then be used for PyPi version.
run("hg tag %s" % version)
# PyPi upload.
run("python setup.py sdist upload")
else:
print("Would tag repo with %s..." % version)
print("Would upload to PyPi...")
## Instruction:
Use `twine` to deploy Wikked to Pypi.
## Code After:
from invoke import task, run
@task
def makerelease(ctx, version, local_only=False):
if not version:
raise Exception("You must specify a version!")
# FoodTruck assets.
print("Update node modules")
run("npm install")
print("Generating Wikked assets")
run("gulp")
if not local_only:
# Tag in Mercurial, which will then be used for PyPi version.
run("hg tag %s" % version)
# PyPi upload.
run("python setup.py sdist bdist_wheel")
run("twine upload dist/Wikked-%s.tar.gz" % version)
else:
print("Would tag repo with %s..." % version)
print("Would upload to PyPi...")
|
# ... existing code ...
# PyPi upload.
run("python setup.py sdist bdist_wheel")
run("twine upload dist/Wikked-%s.tar.gz" % version)
else:
# ... rest of the code ...
|
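The switch reflects the usual modern flow: build the artifacts with setup.py, then hand uploading to twine. A sketch of the same steps — only meaningful inside a project checkout with twine installed; note that uploading dist/* would also push the wheel the record builds but never uploads:

from invoke import run

run("python setup.py sdist bdist_wheel")
run("twine upload dist/*")     # pushes both the sdist and the wheel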
9e131c863c7ff147b95a016b0dfd52c03c60341e
|
tests/test_cmd_write.py
|
tests/test_cmd_write.py
|
from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
os.chdir("test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
|
from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
test_root_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(test_root_dir + "/test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
|
Fix running tests from command line
|
Fix running tests from command line
|
Python
|
mit
|
rzhilkibaev/cfgen
|
from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
- os.chdir("test_dir")
+ test_root_dir = os.path.dirname(os.path.abspath(__file__))
+ os.chdir(test_root_dir + "/test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
|
Fix running tests from command line
|
## Code Before:
from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
os.chdir("test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
## Instruction:
Fix running tests from command line
## Code After:
from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
test_root_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(test_root_dir + "/test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
|
# ... existing code ...
def setup():
test_root_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(test_root_dir + "/test_dir")
clean()
# ... rest of the code ...
|
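Anchoring the working directory to the test module's own location is what makes relative fixture paths stable no matter where the runner is started. The kernel of the fix, as a standalone sketch:

import os

def fixture(name):
    # Resolve relative to this file, not the process's starting directory.
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, "test_dir", name)

print(fixture("test.cfg.expected"))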
5e66b10c3f99e683ffbab1c074583436dd791901
|
tests/test_runner.py
|
tests/test_runner.py
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
Use normal function instead of lambda for this
|
Use normal function instead of lambda for this
|
Python
|
mit
|
CodersOfTheNight/oshino
|
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
+ def error_stub():
+ raise RuntimeError("Simply failing")
+
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
- @patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
+ @patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
Use normal function instead of lambda for this
|
## Code Before:
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", lambda: raise RuntimeError("Simply failing"))
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
## Instruction:
Use normal function instead of lambda for this
## Code After:
import asyncio
from pytest import mark, raises
from oshino.run import main
from mock import patch
def create_loop():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def error_stub():
raise RuntimeError("Simply failing")
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
def test_startup():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
@mark.integration
@patch("oshino.core.heart.forever", lambda: False)
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
with raises(SystemExit):
main(("--config", "tests/data/test_config.yml", "--noop"))
|
# ... existing code ...
def error_stub():
raise RuntimeError("Simply failing")
# ... modified code ...
@patch("oshino.core.heart.create_loop", create_loop)
@patch("dotenv.find_dotenv", error_stub)
def test_dot_env_fail():
# ... rest of the code ...
|
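The underlying language rule, shown in a self-contained sketch: a lambda body must be a single expression, so `lambda: raise RuntimeError(...)` is a SyntaxError; a named function is the standard stand-in when a stub needs to raise.

def error_stub():
    raise RuntimeError("Simply failing")

try:
    error_stub()  # usable anywhere a zero-argument callable is expected
except RuntimeError as exc:
    print("propagated:", exc)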
432a7f72c790ca7ba18f4d575706461e337da593
|
src/hunter/const.py
|
src/hunter/const.py
|
import os
import site
import stat
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
import os
import site
import stat
import sys
import sysconfig
SITE_PACKAGES_PATHS = set()
for scheme in sysconfig.get_scheme_names():
for name in ['platlib', 'purelib']:
try:
SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
except KeyError:
pass
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
if sys.version_info >= (3, 10):
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
Use new method to get package paths that works without deprecations on Python 3.10
|
Use new method to get package paths that works without deprecations on Python 3.10
|
Python
|
bsd-2-clause
|
ionelmc/python-hunter
|
import os
import site
import stat
import sys
- from distutils.sysconfig import get_python_lib
+ import sysconfig
SITE_PACKAGES_PATHS = set()
+ for scheme in sysconfig.get_scheme_names():
+ for name in ['platlib', 'purelib']:
+ try:
+ SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
+ except KeyError:
+ pass
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
+ if sys.version_info >= (3, 10):
+ from distutils.sysconfig import get_python_lib
+
- SITE_PACKAGES_PATHS.add(get_python_lib())
+ SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
Use new method to get package paths that works without deprecations on Python 3.10
|
## Code Before:
import os
import site
import stat
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
## Instruction:
Use new method to get package paths that works without deprecations on Python 3.10
## Code After:
import os
import site
import stat
import sys
import sysconfig
SITE_PACKAGES_PATHS = set()
for scheme in sysconfig.get_scheme_names():
for name in ['platlib', 'purelib']:
try:
SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
except KeyError:
pass
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
if sys.version_info >= (3, 10):
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = {
'<frozen zipimport>',
'<frozen importlib._bootstrap>',
'<frozen importlib._bootstrap_external>',
sys.prefix,
sys.exec_prefix,
os.path.dirname(os.__file__),
os.path.dirname(stat.__file__),
}
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(sorted(SYS_PREFIX_PATHS, key=len, reverse=True))
|
...
import sys
import sysconfig
...
SITE_PACKAGES_PATHS = set()
for scheme in sysconfig.get_scheme_names():
for name in ['platlib', 'purelib']:
try:
SITE_PACKAGES_PATHS.add(sysconfig.get_path(name, scheme))
except KeyError:
pass
if hasattr(site, 'getsitepackages'):
...
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
if sys.version_info >= (3, 10):
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS.add(os.path.dirname(os.path.dirname(__file__)))
...
|
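A standalone sketch of the replacement API (not the hunter module itself): `sysconfig` can enumerate the same install locations without importing `distutils`, which is deprecated on Python 3.10+.

import sysconfig

paths = set()
for scheme in sysconfig.get_scheme_names():
    for name in ("platlib", "purelib"):
        try:
            paths.add(sysconfig.get_path(name, scheme))
        except KeyError:
            pass  # not every scheme defines every path name
print(sorted(paths))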
dc40793ad27704c83dbbd2e923bf0cbcd7cb00ed
|
polyaxon/event_manager/event_service.py
|
polyaxon/event_manager/event_service.py
|
from libs.services import Service
class EventService(Service):
__all__ = ('record', 'setup')
event_manager = None
def can_handle(self, event_type):
return isinstance(event_type, str) and self.event_manager.knows(event_type)
def get_event(self, event_type, instance, **kwargs):
return self.event_manager.get(
event_type,
).from_instance(instance, **kwargs)
def record(self, event_type, instance=None, **kwargs):
""" Validate and record an event.
>>> record('event.action', object_instance)
"""
if not self.is_setup:
return
if not self.can_handle(event_type=event_type):
return
event = self.get_event(event_type=event_type, instance=instance, **kwargs)
self.record_event(event)
def record_event(self, event):
""" Record an event.
>>> record_event(Event())
"""
pass
|
from libs.services import Service
class EventService(Service):
__all__ = ('record', 'setup')
event_manager = None
def can_handle(self, event_type):
return isinstance(event_type, str) and self.event_manager.knows(event_type)
def get_event(self, event_type, event_data=None, instance=None, **kwargs):
if instance or not event_data:
return self.event_manager.get(
event_type,
).from_instance(instance, **kwargs)
return self.event_manager.get(
event_type,
).from_event_data(event_data=event_data, **kwargs)
def record(self, event_type, event_data=None, instance=None, **kwargs):
""" Validate and record an event.
>>> record('event.action', object_instance)
"""
if not self.is_setup:
return
if not self.can_handle(event_type=event_type):
return
event = self.get_event(event_type=event_type,
event_data=event_data,
instance=instance,
**kwargs)
self.record_event(event)
return event
def record_event(self, event):
""" Record an event.
>>> record_event(Event())
"""
pass
|
Handle event instantiation both from an object and from serialized events
|
Handle event instantiation both from an object and from serialized events
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
from libs.services import Service
class EventService(Service):
__all__ = ('record', 'setup')
event_manager = None
def can_handle(self, event_type):
return isinstance(event_type, str) and self.event_manager.knows(event_type)
- def get_event(self, event_type, instance, **kwargs):
+ def get_event(self, event_type, event_data=None, instance=None, **kwargs):
+ if instance or not event_data:
+ return self.event_manager.get(
+ event_type,
+ ).from_instance(instance, **kwargs)
return self.event_manager.get(
event_type,
- ).from_instance(instance, **kwargs)
+ ).from_event_data(event_data=event_data, **kwargs)
- def record(self, event_type, instance=None, **kwargs):
+ def record(self, event_type, event_data=None, instance=None, **kwargs):
""" Validate and record an event.
>>> record('event.action', object_instance)
"""
if not self.is_setup:
return
if not self.can_handle(event_type=event_type):
return
- event = self.get_event(event_type=event_type, instance=instance, **kwargs)
+ event = self.get_event(event_type=event_type,
+ event_data=event_data,
+ instance=instance,
+ **kwargs)
self.record_event(event)
+ return event
def record_event(self, event):
""" Record an event.
>>> record_event(Event())
"""
pass
|
Handle event instantiation both from an object and from serialized events
|
## Code Before:
from libs.services import Service
class EventService(Service):
__all__ = ('record', 'setup')
event_manager = None
def can_handle(self, event_type):
return isinstance(event_type, str) and self.event_manager.knows(event_type)
def get_event(self, event_type, instance, **kwargs):
return self.event_manager.get(
event_type,
).from_instance(instance, **kwargs)
def record(self, event_type, instance=None, **kwargs):
""" Validate and record an event.
>>> record('event.action', object_instance)
"""
if not self.is_setup:
return
if not self.can_handle(event_type=event_type):
return
event = self.get_event(event_type=event_type, instance=instance, **kwargs)
self.record_event(event)
def record_event(self, event):
""" Record an event.
>>> record_event(Event())
"""
pass
## Instruction:
Handle event instantiation both from an object and from serialized events
## Code After:
from libs.services import Service
class EventService(Service):
__all__ = ('record', 'setup')
event_manager = None
def can_handle(self, event_type):
return isinstance(event_type, str) and self.event_manager.knows(event_type)
def get_event(self, event_type, event_data=None, instance=None, **kwargs):
if instance or not event_data:
return self.event_manager.get(
event_type,
).from_instance(instance, **kwargs)
return self.event_manager.get(
event_type,
).from_event_data(event_data=event_data, **kwargs)
def record(self, event_type, event_data=None, instance=None, **kwargs):
""" Validate and record an event.
>>> record('event.action', object_instance)
"""
if not self.is_setup:
return
if not self.can_handle(event_type=event_type):
return
event = self.get_event(event_type=event_type,
event_data=event_data,
instance=instance,
**kwargs)
self.record_event(event)
return event
def record_event(self, event):
""" Record an event.
>>> record_event(Event())
"""
pass
|
...
def get_event(self, event_type, event_data=None, instance=None, **kwargs):
if instance or not event_data:
return self.event_manager.get(
event_type,
).from_instance(instance, **kwargs)
return self.event_manager.get(
...
event_type,
).from_event_data(event_data=event_data, **kwargs)
def record(self, event_type, event_data=None, instance=None, **kwargs):
""" Validate and record an event.
...
event = self.get_event(event_type=event_type,
event_data=event_data,
instance=instance,
**kwargs)
self.record_event(event)
return event
...
|
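A generic, self-contained sketch of the dispatch the new `get_event` performs (the class and fields below are illustrative, not polyaxon's): prefer the live instance when one is supplied, otherwise rehydrate the event from serialized data.

class Event(object):
    def __init__(self, payload):
        self.payload = payload

    @classmethod
    def from_instance(cls, instance, **kwargs):
        return cls(dict(instance, **kwargs))

    @classmethod
    def from_event_data(cls, event_data, **kwargs):
        return cls(dict(event_data, **kwargs))

def get_event(event_data=None, instance=None, **kwargs):
    if instance or not event_data:
        return Event.from_instance(instance, **kwargs)
    return Event.from_event_data(event_data=event_data, **kwargs)

print(get_event(instance={'id': 1}).payload)    # built from the object
print(get_event(event_data={'id': 2}).payload)  # rebuilt from serialized data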
5cf8f3326b6995a871df7f2b61b25ff529216103
|
recordpeeker/command_line.py
|
recordpeeker/command_line.py
|
import argparse
import os
import json
import sys
def parse_args(argv):
parser = argparse.ArgumentParser("Test")
parser.add_argument("--port", "-p", type=int, default=8080, help="Specify the port recordpeeker runs on")
parser.add_argument("--verbosity", "-v", default=0, type=int, choices=[0,1,2,3], help="Spews more info. 1: prints the path of each request. 2: prints the content of unknown requests. 3: Also print the content of known requests.")
return parser.parse_args(argv[1:])
def launch():
script = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mitmdump_input.py')
# This is just here so that --help returns the arguments
args = parse_args(sys.argv)
arglist = " ".join(sys.argv[1:])
sys.argv = [sys.argv[0], '-s "{0}" "{1}"'.format(script, arglist), '-q']
from libmproxy.main import mitmdump
mitmdump()
if __name__ == '__main__':
launch()
|
import argparse
import os
import json
import sys
def parse_args(argv):
parser = argparse.ArgumentParser("Test")
parser.add_argument("--port", "-p", type=int, default=8080, help="Specify the port recordpeeker runs on")
parser.add_argument("--verbosity", "-v", default=0, type=int, choices=[0,1,2,3], help="Spews more info. 1: prints the path of each request. 2: prints the content of unknown requests. 3: Also print the content of known requests.")
return parser.parse_args(argv[1:])
def launch():
script = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mitmdump_input.py')
# This is just here so that --help returns the arguments
args = parse_args(sys.argv)
if sys.argv[1:]:
arglist = " ".join(sys.argv[1:])
scriptargs = '-s "{0}" "{1}"'.format(script, arglist)
else:
scriptargs = '-s "{0}"'.format(script)
sys.argv = [sys.argv[0], scriptargs, '-q']
from libmproxy.main import mitmdump
mitmdump()
if __name__ == '__main__':
launch()
|
Fix bustage for script calls
|
Fix bustage for script calls
|
Python
|
mit
|
jonchang/recordpeeker
|
import argparse
import os
import json
import sys
def parse_args(argv):
parser = argparse.ArgumentParser("Test")
parser.add_argument("--port", "-p", type=int, default=8080, help="Specify the port recordpeeker runs on")
parser.add_argument("--verbosity", "-v", default=0, type=int, choices=[0,1,2,3], help="Spews more info. 1: prints the path of each request. 2: prints the content of unknown requests. 3: Also print the content of known requests.")
return parser.parse_args(argv[1:])
def launch():
script = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mitmdump_input.py')
# This is just here so that --help returns the arguments
args = parse_args(sys.argv)
+ if sys.argv[1:]:
- arglist = " ".join(sys.argv[1:])
+ arglist = " ".join(sys.argv[1:])
+ scriptargs = '-s "{0}" "{1}"'.format(script, arglist)
+ else:
+ scriptargs = '-s "{0}"'.format(script)
- sys.argv = [sys.argv[0], '-s "{0}" "{1}"'.format(script, arglist), '-q']
+ sys.argv = [sys.argv[0], scriptargs, '-q']
from libmproxy.main import mitmdump
mitmdump()
if __name__ == '__main__':
launch()
|
Fix bustage for script calls
|
## Code Before:
import argparse
import os
import json
import sys
def parse_args(argv):
parser = argparse.ArgumentParser("Test")
parser.add_argument("--port", "-p", type=int, default=8080, help="Specify the port recordpeeker runs on")
parser.add_argument("--verbosity", "-v", default=0, type=int, choices=[0,1,2,3], help="Spews more info. 1: prints the path of each request. 2: prints the content of unknown requests. 3: Also print the content of known requests.")
return parser.parse_args(argv[1:])
def launch():
script = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mitmdump_input.py')
# This is just here so that --help returns the arguments
args = parse_args(sys.argv)
arglist = " ".join(sys.argv[1:])
sys.argv = [sys.argv[0], '-s "{0}" "{1}"'.format(script, arglist), '-q']
from libmproxy.main import mitmdump
mitmdump()
if __name__ == '__main__':
launch()
## Instruction:
Fix bustage for script calls
## Code After:
import argparse
import os
import json
import sys
def parse_args(argv):
parser = argparse.ArgumentParser("Test")
parser.add_argument("--port", "-p", type=int, default=8080, help="Specify the port recordpeeker runs on")
parser.add_argument("--verbosity", "-v", default=0, type=int, choices=[0,1,2,3], help="Spews more info. 1: prints the path of each request. 2: prints the content of unknown requests. 3: Also print the content of known requests.")
return parser.parse_args(argv[1:])
def launch():
script = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mitmdump_input.py')
# This is just here so that --help returns the arguments
args = parse_args(sys.argv)
if sys.argv[1:]:
arglist = " ".join(sys.argv[1:])
scriptargs = '-s "{0}" "{1}"'.format(script, arglist)
else:
scriptargs = '-s "{0}"'.format(script)
sys.argv = [sys.argv[0], scriptargs, '-q']
from libmproxy.main import mitmdump
mitmdump()
if __name__ == '__main__':
launch()
|
...
args = parse_args(sys.argv)
if sys.argv[1:]:
arglist = " ".join(sys.argv[1:])
scriptargs = '-s "{0}" "{1}"'.format(script, arglist)
else:
scriptargs = '-s "{0}"'.format(script)
sys.argv = [sys.argv[0], scriptargs, '-q']
from libmproxy.main import mitmdump
...
|
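A reduced, runnable sketch of the difference (the script path is a placeholder): with no extra arguments, the unconditional format call leaves a stray empty "" token in the string handed to mitmdump, which the conditional construction avoids.

script = "mitmdump_input.py"  # placeholder path
extra = []                    # stands in for sys.argv[1:]

broken = '-s "{0}" "{1}"'.format(script, " ".join(extra))
fixed = ('-s "{0}" "{1}"'.format(script, " ".join(extra))
         if extra else '-s "{0}"'.format(script))

print(broken)  # -s "mitmdump_input.py" ""
print(fixed)   # -s "mitmdump_input.py"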
30fae197ff6561a58df33868b3379a41d6a9d9dd
|
settings_test.py
|
settings_test.py
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <[email protected]>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <[email protected]>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
Add ENFORCE_PRIVACY to Travis testing settings.
|
Add ENFORCE_PRIVACY to Travis testing settings.
|
Python
|
agpl-3.0
|
geotagx/geotagx-pybossa-archive,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa,geotagx/geotagx-pybossa-archive,jean/pybossa,CulturePlex/pybossa,PyBossa/pybossa,geotagx/geotagx-pybossa-archive,CulturePlex/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,geotagx/pybossa,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,stefanhahmann/pybossa,CulturePlex/pybossa,geotagx/geotagx-pybossa-archive,harihpr/tweetclickers,stefanhahmann/pybossa,proyectos-analizo-info/pybossa-analizo-info,harihpr/tweetclickers,geotagx/geotagx-pybossa-archive,geotagx/pybossa,Scifabric/pybossa
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <[email protected]>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
+ ENFORCE_PRIVACY = False
|
Add ENFORCE_PRIVACY to Travis testing settings.
|
## Code Before:
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <[email protected]>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
## Instruction:
Add ENFORCE_PRIVACY to Travis testing settings.
## Code After:
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <[email protected]>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
# ... existing code ...
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
# ... rest of the code ...
|
36da7bdc8402494b5ef3588289739e1696ad6002
|
docs/_ext/djangodummy/settings.py
|
docs/_ext/djangodummy/settings.py
|
STATIC_URL = '/static/'
|
STATIC_URL = '/static/'
# Avoid error for missing the secret key
SECRET_KEY = 'docs'
|
Fix autodoc support with Django 1.5
|
Fix autodoc support with Django 1.5
|
Python
|
apache-2.0
|
django-fluent/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents
|
STATIC_URL = '/static/'
+ # Avoid error for missing the secret key
+ SECRET_KEY = 'docs'
+
|
Fix autodoc support with Django 1.5
|
## Code Before:
STATIC_URL = '/static/'
## Instruction:
Fix autodoc support with Django 1.5
## Code After:
STATIC_URL = '/static/'
# Avoid error for missing the secret key
SECRET_KEY = 'docs'
|
# ... existing code ...
STATIC_URL = '/static/'
# Avoid error for missing the secret key
SECRET_KEY = 'docs'
# ... rest of the code ...
|
de42731ab97a7d4272c44cc750891906aa5b4417
|
buildlet/runner/ipythonparallel.py
|
buildlet/runner/ipythonparallel.py
|
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
|
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
Raise errors, if any, in IPythonParallelRunner.wait_tasks
|
Raise errors, if any, in IPythonParallelRunner.wait_tasks
|
Python
|
bsd-3-clause
|
tkf/buildlet
|
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
- self.view.wait(self.results.values())
+ for r in self.results.values():
+ r.get()
|
Raise errors, if any, in IPythonParallelRunner.wait_tasks
|
## Code Before:
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
self.view.wait(self.results.values())
## Instruction:
Raise errors, if any, in IPythonParallelRunner.wait_tasks
## Code After:
import IPython.parallel
from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner
class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):
def submit_tasks(self):
self.client = IPython.parallel.Client()
self.view = view = self.client.load_balanced_view()
self.results = results = {}
for node in self.sorted_nodes():
deps = [results[n] for n in self.graph.predecessors(node)]
with view.temp_flags(after=deps):
results[node] = view.apply_async(self.run_func,
self.nodetaskmap[node])
def wait_tasks(self):
for r in self.results.values():
r.get()
|
# ... existing code ...
def wait_tasks(self):
for r in self.results.values():
r.get()
# ... rest of the code ...
|
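The contrast generalizes beyond IPython.parallel; the same semantics in the standard library's `concurrent.futures` (used here purely as an analogy) show why fetching each result surfaces failures that a bare wait() swallows.

from concurrent.futures import ThreadPoolExecutor, wait

def boom():
    raise RuntimeError("task failed")

with ThreadPoolExecutor(max_workers=1) as pool:
    futures = [pool.submit(boom)]
    wait(futures)          # blocks until done, but reports nothing
    try:
        for f in futures:
            f.result()     # re-raises the worker's exception here
    except RuntimeError as exc:
        print("propagated:", exc)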
9eabdbc6b73661865c4d785cbc57d7ee51fe59cd
|
future/tests/test_imports_urllib.py
|
future/tests/test_imports_urllib.py
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
"""
This should perhaps fail: importing urllib first means that the import hooks
won't be consulted when importing urllib.response.
"""
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
if __name__ == '__main__':
unittest.main()
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
import urllib
orig_file = urllib.__file__
from future.standard_library.urllib import response as urllib_response
self.assertEqual(orig_file, urllib.__file__)
print(urllib_response.__file__)
if __name__ == '__main__':
unittest.main()
|
Change urllib test to use an explicit import
|
Change urllib test to use an explicit import
|
Python
|
mit
|
QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future,krischer/python-future,krischer/python-future,QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future
|
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
- """
- This should perhaps fail: importing urllib first means that the import hooks
- won't be consulted when importing urllib.response.
- """
import urllib
- print(urllib.__file__)
+ orig_file = urllib.__file__
+ from future.standard_library.urllib import response as urllib_response
+ self.assertEqual(orig_file, urllib.__file__)
- from future import standard_library
- with standard_library.hooks():
- import urllib.response
- print(urllib.__file__)
- print(urllib.response.__file__)
+ print(urllib_response.__file__)
if __name__ == '__main__':
unittest.main()
|
Change urllib test to use an explicit import
|
## Code Before:
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
"""
This should perhaps fail: importing urllib first means that the import hooks
won't be consulted when importing urllib.response.
"""
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
if __name__ == '__main__':
unittest.main()
## Instruction:
Change urllib test to use an explicit import
## Code After:
from __future__ import absolute_import, print_function
import unittest
import sys
class ImportUrllibTest(unittest.TestCase):
def test_urllib(self):
import urllib
orig_file = urllib.__file__
from future.standard_library.urllib import response as urllib_response
self.assertEqual(orig_file, urllib.__file__)
print(urllib_response.__file__)
if __name__ == '__main__':
unittest.main()
|
# ... existing code ...
def test_urllib(self):
import urllib
orig_file = urllib.__file__
from future.standard_library.urllib import response as urllib_response
self.assertEqual(orig_file, urllib.__file__)
print(urllib_response.__file__)
# ... rest of the code ...
|
c046d7915c08221e4a84a01edf3ca08a27a931a8
|
opps/api/urls.py
|
opps/api/urls.py
|
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
|
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
|
Set emitter format json in api
|
Set emitter format json in api
|
Python
|
mit
|
williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps
|
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
- url(r'^container/$', container),
+ url(r'^container/$', container, {'emitter_format': 'json'}),
- url(r'^containerbox/$', containerbox),
+ url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
|
Set emitter format json in api
|
## Code Before:
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container),
url(r'^containerbox/$', containerbox),
)
## Instruction:
Set emitter format json in api
## Code After:
from django.conf.urls import patterns, url
from piston.resource import Resource
from opps.containers.api import ContainerHandler, ContainerBoxHandler
container = Resource(handler=ContainerHandler)
containerbox = Resource(handler=ContainerBoxHandler)
urlpatterns = patterns(
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
|
...
'',
url(r'^container/$', container, {'emitter_format': 'json'}),
url(r'^containerbox/$', containerbox, {'emitter_format': 'json'}),
)
...
|
b94f849fe28918a343a142da57b6055064d5b194
|
tests/test_abort_generate_on_hook_error.py
|
tests/test_abort_generate_on_hook_error.py
|
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
Test that an error in post_gen_project aborts generation
|
Test that an error in post_gen_project aborts generation
|
Python
|
bsd-3-clause
|
dajose/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,dajose/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,stevepiercy/cookiecutter,Springerle/cookiecutter,luzfcb/cookiecutter
|
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
+ @pytest.mark.usefixtures('clean_system')
+ def test_post_gen_hook(tmpdir):
+ context = {
+ 'cookiecutter': {
+ "repo_dir": "foobar",
+ "abort_pre_gen": "no",
+ "abort_post_gen": "yes"
+ }
+ }
+
+ with pytest.raises(exceptions.FailedHookException):
+ generate.generate_files(
+ repo_dir='tests/hooks-abort-render',
+ context=context,
+ output_dir=str(tmpdir)
+ )
+
+ assert not tmpdir.join('foobar').isdir()
+
|
Test that an error in post_gen_project aborts generation
|
## Code Before:
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
## Instruction:
Test that an error in post_gen_project aborts generation
## Code After:
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
# ... existing code ...
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
# ... rest of the code ...
|
e9bfe96cb3463fe99f08305aab44bd3d7556825a
|
api/radar_api/serializers/group_users.py
|
api/radar_api/serializers/group_users.py
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
Add managed roles to serializer
|
Add managed roles to serializer
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
+ managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
Add managed roles to serializer
|
## Code Before:
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
## Instruction:
Add managed roles to serializer
## Code After:
from radar_api.serializers.groups import GroupReferenceField
from radar_api.serializers.meta import MetaSerializerMixin
from radar_api.serializers.user_mixins import UserSerializerMixin
from radar.models.groups import GroupUser
from radar.roles import ROLE, ROLE_NAMES
from radar.serializers.fields import ListField, StringField
from radar.serializers.models import ModelSerializer
from radar.serializers.fields import LabelledEnumField
class GroupUserSerializer(UserSerializerMixin, MetaSerializerMixin, ModelSerializer):
group = GroupReferenceField()
role = LabelledEnumField(ROLE, ROLE_NAMES)
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['user_id', 'group_id']
|
...
permissions = ListField(StringField(), read_only=True)
managed_roles = ListField(StringField(), read_only=True)
...
|
5fbf410e0042c82e524b3b08276b2d628d00b3c6
|
stickytape/prelude.py
|
stickytape/prelude.py
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys
sys.path.insert(0, __stickytape_working_dir)
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys as __stickytape_sys
__stickytape_sys.path.insert(0, __stickytape_working_dir)
|
Undo accidental global leakage of sys
|
Undo accidental global leakage of sys
|
Python
|
bsd-2-clause
|
mwilliamson/stickytape
|
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
- import sys
+ import sys as __stickytape_sys
- sys.path.insert(0, __stickytape_working_dir)
+ __stickytape_sys.path.insert(0, __stickytape_working_dir)
|
Undo accidental global leakage of sys
|
## Code Before:
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys
sys.path.insert(0, __stickytape_working_dir)
## Instruction:
Undo accidental global leakage of sys
## Code After:
import contextlib as __stickytape_contextlib
@__stickytape_contextlib.contextmanager
def __stickytape_temporary_dir():
import tempfile
import shutil
dir_path = tempfile.mkdtemp()
try:
yield dir_path
finally:
shutil.rmtree(dir_path)
with __stickytape_temporary_dir() as __stickytape_working_dir:
def __stickytape_write_module(path, contents):
import os, os.path, errno
def make_package(path):
parts = path.split("/")
partial_path = __stickytape_working_dir
for part in parts:
partial_path = os.path.join(partial_path, part)
if not os.path.exists(partial_path):
os.mkdir(partial_path)
open(os.path.join(partial_path, "__init__.py"), "w").write("\n")
make_package(os.path.dirname(path))
full_path = os.path.join(__stickytape_working_dir, path)
with open(full_path, "w") as module_file:
module_file.write(contents)
import sys as __stickytape_sys
__stickytape_sys.path.insert(0, __stickytape_working_dir)
|
# ... existing code ...
import sys as __stickytape_sys
__stickytape_sys.path.insert(0, __stickytape_working_dir)
# ... rest of the code ...
|
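A tiny standalone demonstration of the leak being undone (the alias name below is made up, not stickytape's): a plain `import sys` in the prelude defines the public global `sys` inside every bundled script, while a prefixed alias keeps the user-visible namespace clean.

import sys as __bundle_sys  # hypothetical prefixed alias

__bundle_sys.path.insert(0, ".")    # the prelude still has full sys access
print("sys" in globals())           # False: no public `sys` name was created
print("__bundle_sys" in globals())  # True: only the prefixed alias exists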
c730184a6ec826f9773fa4130e59121c0fd06e4d
|
api_v3/misc/oauth2.py
|
api_v3/misc/oauth2.py
|
from urlparse import urljoin
from django.conf import settings
import jwt
from social_core.backends.oauth import BaseOAuth2
class KeycloakOAuth2(BaseOAuth2):
"""Keycloak OAuth authentication backend"""
name = 'keycloak'
ID_KEY = 'email'
BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE
USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo')
AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth')
ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token')
ACCESS_TOKEN_METHOD = 'POST'
def get_user_details(self, response):
clients = response.get('resource_access', {})
client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {})
roles = set(client.get('roles', []))
return {
'username': response.get('preferred_username'),
'email': response.get('email'),
'first_name': response.get('given_name'),
'last_name': response.get('family_name'),
'is_staff': 'staff' in roles,
'is_superuser': 'superuser' in roles,
}
def user_data(self, access_token, *args, **kwargs):
return jwt.decode(access_token, verify=False)
def activate_user(backend, user, response, *args, **kwargs):
user.is_active = True
user.save()
|
from urlparse import urljoin
from django.conf import settings
import jwt
from social_core.backends.oauth import BaseOAuth2
class KeycloakOAuth2(BaseOAuth2):
"""Keycloak OAuth authentication backend"""
name = 'keycloak'
ID_KEY = 'email'
BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE
USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo')
AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth')
ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token')
ACCESS_TOKEN_METHOD = 'POST'
def get_user_details(self, response):
clients = response.get('resource_access', {})
client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {})
roles = set(client.get('roles', []))
return {
'email': response.get('email'),
'first_name': response.get('given_name'),
'last_name': response.get('family_name'),
'is_staff': 'staff' in roles,
'is_superuser': 'superuser' in roles,
}
def user_data(self, access_token, *args, **kwargs):
return jwt.decode(access_token, verify=False)
def activate_user(backend, user, response, *args, **kwargs):
user.is_active = True
user.save()
|
Remove `username` from the Keycloak payload.
|
Remove `username` from the Keycloak payload.
|
Python
|
mit
|
occrp/id-backend
|
from urlparse import urljoin
from django.conf import settings
import jwt
from social_core.backends.oauth import BaseOAuth2
class KeycloakOAuth2(BaseOAuth2):
"""Keycloak OAuth authentication backend"""
name = 'keycloak'
ID_KEY = 'email'
BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE
USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo')
AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth')
ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token')
ACCESS_TOKEN_METHOD = 'POST'
def get_user_details(self, response):
clients = response.get('resource_access', {})
client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {})
roles = set(client.get('roles', []))
return {
- 'username': response.get('preferred_username'),
'email': response.get('email'),
'first_name': response.get('given_name'),
'last_name': response.get('family_name'),
'is_staff': 'staff' in roles,
'is_superuser': 'superuser' in roles,
}
def user_data(self, access_token, *args, **kwargs):
return jwt.decode(access_token, verify=False)
def activate_user(backend, user, response, *args, **kwargs):
user.is_active = True
user.save()
|
Remove `username` from the Keycloak payload.
|
## Code Before:
from urlparse import urljoin
from django.conf import settings
import jwt
from social_core.backends.oauth import BaseOAuth2
class KeycloakOAuth2(BaseOAuth2):
"""Keycloak OAuth authentication backend"""
name = 'keycloak'
ID_KEY = 'email'
BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE
USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo')
AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth')
ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token')
ACCESS_TOKEN_METHOD = 'POST'
def get_user_details(self, response):
clients = response.get('resource_access', {})
client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {})
roles = set(client.get('roles', []))
return {
'username': response.get('preferred_username'),
'email': response.get('email'),
'first_name': response.get('given_name'),
'last_name': response.get('family_name'),
'is_staff': 'staff' in roles,
'is_superuser': 'superuser' in roles,
}
def user_data(self, access_token, *args, **kwargs):
return jwt.decode(access_token, verify=False)
def activate_user(backend, user, response, *args, **kwargs):
user.is_active = True
user.save()
## Instruction:
Remove `username` from the Keycloak payload.
## Code After:
from urlparse import urljoin
from django.conf import settings
import jwt
from social_core.backends.oauth import BaseOAuth2
class KeycloakOAuth2(BaseOAuth2):
"""Keycloak OAuth authentication backend"""
name = 'keycloak'
ID_KEY = 'email'
BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE
USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo')
AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth')
ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token')
ACCESS_TOKEN_METHOD = 'POST'
def get_user_details(self, response):
clients = response.get('resource_access', {})
client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {})
roles = set(client.get('roles', []))
return {
'email': response.get('email'),
'first_name': response.get('given_name'),
'last_name': response.get('family_name'),
'is_staff': 'staff' in roles,
'is_superuser': 'superuser' in roles,
}
def user_data(self, access_token, *args, **kwargs):
return jwt.decode(access_token, verify=False)
def activate_user(backend, user, response, *args, **kwargs):
user.is_active = True
user.save()
|
# ... existing code ...
return {
'email': response.get('email'),
# ... rest of the code ...
|
463fa89c143cd4493ea3704f177c5aba0ebb2af7
|
idiokit/xmpp/_resolve.py
|
idiokit/xmpp/_resolve.py
|
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port_and_count(port):
count = 0
while True:
try:
family, ip = yield idiokit.next()
except StopIteration:
idiokit.stop(count)
yield idiokit.send(family, ip, port)
count += 1
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port_and_count(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except dns.ResponseError:
srv_records = []
srv_count = 0
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
srv_count += yield _resolve_host(srv_record.target, port)
if srv_count == 0:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
|
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port(port):
while True:
family, ip = yield idiokit.next()
yield idiokit.send(family, ip, port)
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
if not srv_records:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
return
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
yield _resolve_host(srv_record.target, port)
|
Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
|
idiokit.xmpp: Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
|
Python
|
mit
|
abusesa/idiokit
|
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
- def _add_port_and_count(port):
+ def _add_port(port):
- count = 0
-
while True:
- try:
- family, ip = yield idiokit.next()
+ family, ip = yield idiokit.next()
- except StopIteration:
- idiokit.stop(count)
-
yield idiokit.send(family, ip, port)
- count += 1
def _resolve_host(host, port):
- return dns.host_lookup(host) | _add_port_and_count(port)
+ return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
- except dns.ResponseError:
+ except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
- srv_count = 0
+ if not srv_records:
+ port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
+ yield _resolve_host(domain, port)
+ return
+
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
- srv_count += yield _resolve_host(srv_record.target, port)
+ yield _resolve_host(srv_record.target, port)
- if srv_count == 0:
- port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
- yield _resolve_host(domain, port)
-
|
Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
|
## Code Before:
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port_and_count(port):
count = 0
while True:
try:
family, ip = yield idiokit.next()
except StopIteration:
idiokit.stop(count)
yield idiokit.send(family, ip, port)
count += 1
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port_and_count(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except dns.ResponseError:
srv_records = []
srv_count = 0
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
srv_count += yield _resolve_host(srv_record.target, port)
if srv_count == 0:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
## Instruction:
Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
## Code After:
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port(port):
while True:
family, ip = yield idiokit.next()
yield idiokit.send(family, ip, port)
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
if not srv_records:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
return
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
yield _resolve_host(srv_record.target, port)
|
...
@idiokit.stream
def _add_port(port):
while True:
family, ip = yield idiokit.next()
yield idiokit.send(family, ip, port)
...
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port(port)
...
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
...
if not srv_records:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
return
for srv_record in dns.ordered_srv_records(srv_records):
...
port = srv_record.port if forced_port is None else forced_port
yield _resolve_host(srv_record.target, port)
...
|
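The RFC 6120 rule behind this fix can be sketched without idiokit: fall back to the bare domain only when the SRV lookup produced no answer, never because connecting to an answered target failed. A minimal plain-Python sketch:
DEFAULT_XMPP_PORT = 5222
def pick_targets(domain, srv_records, forced_port=None):
    # srv_records is a list of (target, port) pairs from the SRV answer.
    if not srv_records:
        return [(domain, forced_port or DEFAULT_XMPP_PORT)]
    return [(target, forced_port or port) for target, port in srv_records]
assert pick_targets('example.org', []) == [('example.org', 5222)]
assert pick_targets('example.org', [('xmpp.example.org', 5269)]) == [('xmpp.example.org', 5269)]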
95fbbe9bac94e171424cb8ee23a675a70607fb62
|
tests/test_constants.py
|
tests/test_constants.py
|
from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
|
from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
|
Fix import order picked up by isort
|
Fix import order picked up by isort
|
Python
|
mit
|
springload/draftjs_exporter,springload/draftjs_exporter,springload/draftjs_exporter
|
from __future__ import absolute_import, unicode_literals
import unittest
- from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
+ from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
|
Fix import order picked up by isort
|
## Code Before:
from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
## Instruction:
Fix import order picked up by isort
## Code After:
from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
|
# ... existing code ...
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
# ... rest of the code ...
|
c6837af1af2939965976bfb45099bf7c2407a9da
|
twitter_api/middleware/ghetto_oauth.py
|
twitter_api/middleware/ghetto_oauth.py
|
from django.contrib.auth.models import User
import re
class GhettoOAuthMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
user_id = None
if 'HTTP_AUTHORIZATION' in request.META and request.META['HTTP_AUTHORIZATION'].startswith('OAuth'):
m = re.search(r'oauth_token="(\d+)"',
request.META['HTTP_AUTHORIZATION'])
if m:
user_id = m.group(1)
if 'oauth_token' in request.GET:
user_id = request.GET['oauth_token']
if user_id:
request.user = User.objects.get(pk=user_id)
return view_func(request, *view_args, **view_kwargs)
|
from django.contrib.auth.models import User
import re
class GhettoOAuthMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
user_id = self._get_token_from_header(request, 'HTTP_AUTHORIZATION')
if not user_id:
user_id = self._get_token_from_header(request, 'HTTP_X_VERIFY_CREDENTIALS_AUTHORIZATION')
if 'oauth_token' in request.GET:
user_id = request.GET['oauth_token']
if user_id:
request.user = User.objects.get(pk=user_id)
return view_func(request, *view_args, **view_kwargs)
def _get_token_from_header(self, request, header):
if header in request.META and request.META[header].startswith('OAuth'):
m = re.search(r'oauth_token="(\d+)"', request.META[header])
if m:
return m.group(1)
|
Add more HTTP headers to GhettoOauth
|
Add more HTTP headers to GhettoOauth
The official iPhone Twitter client uses
HTTP_X_VERIFY_CREDENTIALS_AUTHORIZATION when it's
connecting to image upload services.
|
Python
|
bsd-2-clause
|
simonw/bugle_project,devfort/bugle,simonw/bugle_project,devfort/bugle,devfort/bugle
|
from django.contrib.auth.models import User
import re
class GhettoOAuthMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
+ user_id = self._get_token_from_header(request, 'HTTP_AUTHORIZATION')
+ if not user_id:
+ user_id = self._get_token_from_header(request, 'HTTP_X_VERIFY_CREDENTIALS_AUTHORIZATION')
+
- user_id = None
- if 'HTTP_AUTHORIZATION' in request.META and request.META['HTTP_AUTHORIZATION'].startswith('OAuth'):
- m = re.search(r'oauth_token="(\d+)"',
- request.META['HTTP_AUTHORIZATION'])
- if m:
- user_id = m.group(1)
if 'oauth_token' in request.GET:
user_id = request.GET['oauth_token']
if user_id:
request.user = User.objects.get(pk=user_id)
return view_func(request, *view_args, **view_kwargs)
+
+ def _get_token_from_header(self, request, header):
+ if header in request.META and request.META[header].startswith('OAuth'):
+ m = re.search(r'oauth_token="(\d+)"', request.META[header])
+ if m:
+ return m.group(1)
+
|
Add more HTTP headers to GhettoOauth
|
## Code Before:
from django.contrib.auth.models import User
import re
class GhettoOAuthMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
user_id = None
if 'HTTP_AUTHORIZATION' in request.META and request.META['HTTP_AUTHORIZATION'].startswith('OAuth'):
m = re.search(r'oauth_token="(\d+)"',
request.META['HTTP_AUTHORIZATION'])
if m:
user_id = m.group(1)
if 'oauth_token' in request.GET:
user_id = request.GET['oauth_token']
if user_id:
request.user = User.objects.get(pk=user_id)
return view_func(request, *view_args, **view_kwargs)
## Instruction:
Add more HTTP headers to GhettoOauth
## Code After:
from django.contrib.auth.models import User
import re
class GhettoOAuthMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
user_id = self._get_token_from_header(request, 'HTTP_AUTHORIZATION')
if not user_id:
user_id = self._get_token_from_header(request, 'HTTP_X_VERIFY_CREDENTIALS_AUTHORIZATION')
if 'oauth_token' in request.GET:
user_id = request.GET['oauth_token']
if user_id:
request.user = User.objects.get(pk=user_id)
return view_func(request, *view_args, **view_kwargs)
def _get_token_from_header(self, request, header):
if header in request.META and request.META[header].startswith('OAuth'):
m = re.search(r'oauth_token="(\d+)"', request.META[header])
if m:
return m.group(1)
|
// ... existing code ...
def process_view(self, request, view_func, view_args, view_kwargs):
user_id = self._get_token_from_header(request, 'HTTP_AUTHORIZATION')
if not user_id:
user_id = self._get_token_from_header(request, 'HTTP_X_VERIFY_CREDENTIALS_AUTHORIZATION')
if 'oauth_token' in request.GET:
// ... modified code ...
return view_func(request, *view_args, **view_kwargs)
def _get_token_from_header(self, request, header):
if header in request.META and request.META[header].startswith('OAuth'):
m = re.search(r'oauth_token="(\d+)"', request.META[header])
if m:
return m.group(1)
// ... rest of the code ...
|
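The header parsing factored into _get_token_from_header boils down to one prefix check plus a regex; a self-contained sketch of the same logic:
import re
def token_from_header(value):
    # An OAuth-style header whose oauth_token parameter carries a numeric user id.
    if value.startswith('OAuth'):
        m = re.search(r'oauth_token="(\d+)"', value)
        if m:
            return m.group(1)
    return None
assert token_from_header('OAuth oauth_token="42", oauth_version="1.0"') == '42'
assert token_from_header('Bearer abc') is None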
c5a1eab4cc08e26d852cc9e1f73478c65174af3c
|
students/psbriant/final_project/test_clean_data.py
|
students/psbriant/final_project/test_clean_data.py
|
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
Test zipcode based wateruse graphs.
To Do: fix test that fails
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
|
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
Test zipcode based wateruse graphs.
To Do: fix test that fails
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
def test_user_interface():
"""
Tests user interface.
"""
|
Add empty test function for user interface.
|
Add empty test function for user interface.
|
Python
|
unlicense
|
UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016
|
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
Test zipcode based wateruse graphs.
To Do: fix test that fails
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
+
+ def test_user_interface():
+ """
+ Tests user interface.
+ """
+
|
Add empty test function for user interface.
|
## Code Before:
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
Test zipcode based wateruse graphs.
To Do: fix test that fails
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
## Instruction:
Add empty test function for user interface.
## Code After:
import clean_data as cd
import matplotlib.pyplot as plt
import pandas
import pytest
def get_data():
"""
Retrieve data from csv file to test.
"""
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
return data
def test_clean():
"""
"""
def test_rename_columns():
"""
Test whether rename_columns successfully renames each column.
"""
data = get_data()
data = data.drop(["Date Value"], axis=1)
column_names = list(data.columns.values)
column_list = cd.rename_columns(column_names)
assert column_list[0:5] == ["Date", "90001", "90002", "90003", "90004"]
def test_find_low_water_use():
"""
"""
@pytest.mark.mpl_image_compare
def test_plot_zipcode():
"""
Test zipcode based wateruse graphs.
To Do: fix test that fails
"""
data = get_data()
cleaned = cd.clean(data)
wateruse = cd.plot_zipcode(cleaned, "90012")
return wateruse
def test_user_interface():
"""
Tests user interface.
"""
|
// ... existing code ...
return wateruse
def test_user_interface():
"""
Tests user interface.
"""
// ... rest of the code ...
|
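An alternative to committing an empty test body (not what this commit did) is a skip marker, which keeps the placeholder visible in pytest reports:
import pytest
@pytest.mark.skip(reason='user interface test not implemented yet')
def test_user_interface():
    """Tests user interface."""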
caf6514b6af278583d9816b722fab9456d0ad9f1
|
pelicanconf.py
|
pelicanconf.py
|
from __future__ import unicode_literals
AUTHOR = u'DLR'
SITENAME = u'RCE'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
DEFAULT_DATE_FORMAT = '%a %d %B %Y'
THEME = 'themes/polar'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Simulation and Software Technology', 'https://www.dlr.de/sc'),
('Imprint', '/pages/imprint.html'),
('Privacy', '/pages/privacy.html'),)
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/RCEnvironment'),
('YouTube', 'https://www.youtube.com/user/rcenvironment'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Static paths
STATIC_PATHS = ['images', 'pages/images', 'extra/CNAME']
# Plugins
PLUGIN_PATHS = ["plugins", "d:\\rce\\plugins"]
PLUGINS = ['pelican-page-hierarchy.page_hierarchy',]
# Github pages domain name
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
|
from __future__ import unicode_literals
AUTHOR = u'DLR'
SITENAME = u'RCE'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
DEFAULT_DATE_FORMAT = '%a %d %B %Y'
THEME = 'themes/polar'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Institute for Software Technology', 'https://www.dlr.de/sc'),
('Imprint', '/pages/imprint.html'),
('Privacy', '/pages/privacy.html'),)
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/RCEnvironment'),
('YouTube', 'https://www.youtube.com/user/rcenvironment'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Static paths
STATIC_PATHS = ['images', 'pages/images', 'extra/CNAME']
# Plugins
PLUGIN_PATHS = ["plugins", "d:\\rce\\plugins"]
PLUGINS = ['pelican-page-hierarchy.page_hierarchy',]
# Github pages domain name
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
|
Update link to institute in footer to say "Institute for Software Technology"
|
Update link to institute in footer to say "Institute for Software Technology"
|
Python
|
cc0-1.0
|
DLR-SC/rce-website,DLR-SC/rce-website,DLR-SC/rce-website,DLR-SC/rce-website,DLR-SC/rce-website
|
from __future__ import unicode_literals
AUTHOR = u'DLR'
SITENAME = u'RCE'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
DEFAULT_DATE_FORMAT = '%a %d %B %Y'
THEME = 'themes/polar'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
- LINKS = (('Simulation and Software Technology', 'https://www.dlr.de/sc'),
+ LINKS = (('Institute for Software Technology', 'https://www.dlr.de/sc'),
('Imprint', '/pages/imprint.html'),
('Privacy', '/pages/privacy.html'),)
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/RCEnvironment'),
('YouTube', 'https://www.youtube.com/user/rcenvironment'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Static paths
STATIC_PATHS = ['images', 'pages/images', 'extra/CNAME']
# Plugins
PLUGIN_PATHS = ["plugins", "d:\\rce\\plugins"]
PLUGINS = ['pelican-page-hierarchy.page_hierarchy',]
# Github pages domain name
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
|
Update link to institute in footer to say "Institute for Software Technology"
|
## Code Before:
from __future__ import unicode_literals
AUTHOR = u'DLR'
SITENAME = u'RCE'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
DEFAULT_DATE_FORMAT = '%a %d %B %Y'
THEME = 'themes/polar'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Simulation and Software Technology', 'https://www.dlr.de/sc'),
('Imprint', '/pages/imprint.html'),
('Privacy', '/pages/privacy.html'),)
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/RCEnvironment'),
('YouTube', 'https://www.youtube.com/user/rcenvironment'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Static paths
STATIC_PATHS = ['images', 'pages/images', 'extra/CNAME']
# Plugins
PLUGIN_PATHS = ["plugins", "d:\\rce\\plugins"]
PLUGINS = ['pelican-page-hierarchy.page_hierarchy',]
# Github pages domain name
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
## Instruction:
Update link to institute in footer to say "Institute for Software Technology"
## Code After:
from __future__ import unicode_literals
AUTHOR = u'DLR'
SITENAME = u'RCE'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Paris'
DEFAULT_LANG = u'en'
DEFAULT_DATE_FORMAT = '%a %d %B %Y'
THEME = 'themes/polar'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Institute for Software Technology', 'https://www.dlr.de/sc'),
('Imprint', '/pages/imprint.html'),
('Privacy', '/pages/privacy.html'),)
# Social widget
SOCIAL = (('Twitter', 'https://twitter.com/RCEnvironment'),
('YouTube', 'https://www.youtube.com/user/rcenvironment'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Static paths
STATIC_PATHS = ['images', 'pages/images', 'extra/CNAME']
# Plugins
PLUGIN_PATHS = ["plugins", "d:\\rce\\plugins"]
PLUGINS = ['pelican-page-hierarchy.page_hierarchy',]
# Github pages domain name
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
|
// ... existing code ...
# Blogroll
LINKS = (('Institute for Software Technology', 'https://www.dlr.de/sc'),
('Imprint', '/pages/imprint.html'),
// ... rest of the code ...
|
95474b52fd81b8363809fe915bd38d00335424a9
|
thinglang/execution/builtins.py
|
thinglang/execution/builtins.py
|
class ThingObjectBase(object):
def __getitem__(self, item):
return getattr(self, item)
def __contains__(self, item):
return hasattr(self, item)
class ThingObjectOutput(ThingObjectBase):
def __init__(self):
self.data = []
def write(self, *args):
self.data.append(' '.join(str(x) for x in args))
class ThingObjectInput(ThingObjectBase):
def __init__(self):
self.data = []
def get_line(self):
line = input()
self.data.append(line)
return line
|
class ThingObjectBase(object):
def __getitem__(self, item):
return getattr(self, item)
def __contains__(self, item):
return hasattr(self, item)
class ThingObjectOutput(ThingObjectBase):
def __init__(self):
self.data = []
def write(self, *args):
self.data.append(' '.join(str(x) for x in args))
class ThingObjectInput(ThingObjectBase):
def __init__(self, heap):
self.data = []
self.heap = heap
def get_line(self, line=None):
if line is not None:
self.heap['Output'].write(line)
line = input()
self.data.append(line)
return line
|
Update Input object to support direct output during get_line operations
|
Update Input object to support direct output during get_line operations
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
class ThingObjectBase(object):
def __getitem__(self, item):
return getattr(self, item)
def __contains__(self, item):
return hasattr(self, item)
class ThingObjectOutput(ThingObjectBase):
def __init__(self):
self.data = []
def write(self, *args):
self.data.append(' '.join(str(x) for x in args))
class ThingObjectInput(ThingObjectBase):
- def __init__(self):
+ def __init__(self, heap):
self.data = []
+ self.heap = heap
- def get_line(self):
+ def get_line(self, line=None):
+ if line is not None:
+ self.heap['Output'].write(line)
+
line = input()
self.data.append(line)
return line
|
Update Input object to support direct output during get_line operations
|
## Code Before:
class ThingObjectBase(object):
def __getitem__(self, item):
return getattr(self, item)
def __contains__(self, item):
return hasattr(self, item)
class ThingObjectOutput(ThingObjectBase):
def __init__(self):
self.data = []
def write(self, *args):
self.data.append(' '.join(str(x) for x in args))
class ThingObjectInput(ThingObjectBase):
def __init__(self):
self.data = []
def get_line(self):
line = input()
self.data.append(line)
return line
## Instruction:
Update Input object to support direct output during get_line operations
## Code After:
class ThingObjectBase(object):
def __getitem__(self, item):
return getattr(self, item)
def __contains__(self, item):
return hasattr(self, item)
class ThingObjectOutput(ThingObjectBase):
def __init__(self):
self.data = []
def write(self, *args):
self.data.append(' '.join(str(x) for x in args))
class ThingObjectInput(ThingObjectBase):
def __init__(self, heap):
self.data = []
self.heap = heap
def get_line(self, line=None):
if line is not None:
self.heap['Output'].write(line)
line = input()
self.data.append(line)
return line
|
// ... existing code ...
def __init__(self, heap):
self.data = []
self.heap = heap
def get_line(self, line=None):
if line is not None:
self.heap['Output'].write(line)
line = input()
// ... rest of the code ...
|
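The heap wiring lets get_line echo a prompt through the shared Output object before reading. A standalone sketch of the pattern, with input() replaced by a canned iterator so it runs without a console:
class FakeOutput:
    def __init__(self):
        self.data = []
    def write(self, *args):
        self.data.append(' '.join(str(x) for x in args))
class FakeInput:
    def __init__(self, heap, lines):
        self.heap = heap
        self._lines = iter(lines)
    def get_line(self, line=None):
        if line is not None:
            self.heap['Output'].write(line)
        return next(self._lines)
heap = {'Output': FakeOutput()}
assert FakeInput(heap, ['hello']).get_line('name?') == 'hello'
assert heap['Output'].data == ['name?']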
e1a27161621038cc3bdfd4030aef130ee09e92ec
|
troposphere/dax.py
|
troposphere/dax.py
|
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
|
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterEndpointEncryptionType": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
|
Update DAX per 2021-06-24 changes
|
Update DAX per 2021-06-24 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
+ "ClusterEndpointEncryptionType": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
|
Update DAX per 2021-06-24 changes
|
## Code Before:
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
## Instruction:
Update DAX per 2021-06-24 changes
## Code After:
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterEndpointEncryptionType": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
|
...
"AvailabilityZones": (str, False),
"ClusterEndpointEncryptionType": (str, False),
"ClusterName": (str, False),
...
|
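A minimal troposphere usage sketch for the new property; every value below (ARN, node type, subnet group name) is a placeholder, and 'TLS' is one of the values AWS accepts for ClusterEndpointEncryptionType:
from troposphere import Template, dax
t = Template()
t.add_resource(dax.Cluster(
    'DemoCluster',
    IAMRoleARN='arn:aws:iam::123456789012:role/DAXRole',  # placeholder ARN
    NodeType='dax.r4.large',
    ReplicationFactor='3',
    SubnetGroupName='demo-subnets',
    ClusterEndpointEncryptionType='TLS',
))
print(t.to_json())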
9793107fb218bdff796d8df55404156e299e33ea
|
website/apps/ts_om/check.py
|
website/apps/ts_om/check.py
|
import os
from django.conf import settings
__author__ = 'nreed'
url_dict = {
'validate': 'http://127.0.0.1:8000/om_validate/validate/',
'scenarios': '/home/nreed/scenarios/',
'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}
def check_dir(local_dir, typ):
if local_dir is None or local_dir == '':
return url_dict[typ]
if os.name == "nt":
if not local_dir.endswith('\\'):
local_dir += '\\'
else:
if not local_dir.endswith('/'):
local_dir += '/'
return local_dir
def check_url(url, typ):
if url is None or url == '':
return url_dict[typ]
if not url.endswith('/'):
url += '/'
return url
|
import os
from django.conf import settings
__author__ = 'nreed'
url_dict = {
'validate': 'http://127.0.0.1:8000/om_validate/validate/',
'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/',
'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}
def check_dir(local_dir, typ):
if local_dir is None or local_dir == '':
return url_dict[typ]
if os.name == "nt":
if not local_dir.endswith('\\'):
local_dir += '\\'
else:
if not local_dir.endswith('/'):
local_dir += '/'
return local_dir
def check_url(url, typ):
if url is None or url == '':
return url_dict[typ]
if not url.endswith('/'):
url += '/'
return url
|
Set default scenarios directory to within root of project.
|
Set default scenarios directory to within root of project.
|
Python
|
mpl-2.0
|
vecnet/om,vecnet/om,vecnet/om,vecnet/om,vecnet/om
|
import os
from django.conf import settings
__author__ = 'nreed'
url_dict = {
'validate': 'http://127.0.0.1:8000/om_validate/validate/',
- 'scenarios': '/home/nreed/scenarios/',
+ 'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/',
'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}
def check_dir(local_dir, typ):
if local_dir is None or local_dir == '':
return url_dict[typ]
if os.name == "nt":
if not local_dir.endswith('\\'):
local_dir += '\\'
else:
if not local_dir.endswith('/'):
local_dir += '/'
return local_dir
def check_url(url, typ):
if url is None or url == '':
return url_dict[typ]
if not url.endswith('/'):
url += '/'
return url
|
Set default scenarios directory to within root of project.
|
## Code Before:
import os
from django.conf import settings
__author__ = 'nreed'
url_dict = {
'validate': 'http://127.0.0.1:8000/om_validate/validate/',
'scenarios': '/home/nreed/scenarios/',
'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}
def check_dir(local_dir, typ):
if local_dir is None or local_dir == '':
return url_dict[typ]
if os.name == "nt":
if not local_dir.endswith('\\'):
local_dir += '\\'
else:
if not local_dir.endswith('/'):
local_dir += '/'
return local_dir
def check_url(url, typ):
if url is None or url == '':
return url_dict[typ]
if not url.endswith('/'):
url += '/'
return url
## Instruction:
Set default scenarios directory to within root of project.
## Code After:
import os
from django.conf import settings
__author__ = 'nreed'
url_dict = {
'validate': 'http://127.0.0.1:8000/om_validate/validate/',
'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/',
'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
}
def check_dir(local_dir, typ):
if local_dir is None or local_dir == '':
return url_dict[typ]
if os.name == "nt":
if not local_dir.endswith('\\'):
local_dir += '\\'
else:
if not local_dir.endswith('/'):
local_dir += '/'
return local_dir
def check_url(url, typ):
if url is None or url == '':
return url_dict[typ]
if not url.endswith('/'):
url += '/'
return url
|
# ... existing code ...
'validate': 'http://127.0.0.1:8000/om_validate/validate/',
'scenarios': getattr(settings, "PROJECT_ROOT", '') + '/scenarios/',
'openmalaria': getattr(settings, "PROJECT_ROOT", '') + '/om_validate/bin/'
# ... rest of the code ...
|
ca348485b0acf2dbc41f2b4e52140d69e318327c
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile
class CtreConan(ConanFile):
name = "CTRE"
version = "2.0"
license = "MIT"
url = "https://github.com/hanickadot/compile-time-regular-expressions.git"
author = "Hana Dusíková ([email protected])"
description = "Compile Time Regular Expression for C++17/20"
homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
exports = "LICENSE"
exports_sources = "include/*"
no_copy_source = True
def package(self):
self.copy("LICENSE", "licenses")
self.copy("*.hpp")
def package_id(self):
self.info.header_only()
|
from conans import ConanFile
class CtreConan(ConanFile):
name = "CTRE"
version = "2.0"
license = "MIT"
url = "https://github.com/hanickadot/compile-time-regular-expressions"
author = "Hana Dusíková ([email protected])"
description = "Compile Time Regular Expression for C++17/20"
homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
no_copy_source = True
scm = {
"type": "git",
"url": "auto",
"revision": "auto"
}
def package(self):
self.copy("LICENSE", "licenses")
self.copy("*.hpp")
def package_id(self):
self.info.header_only()
|
Use SCM feature to build Conan package
|
Use SCM feature to build Conan package
- SCM could export all project files, helping the build
Signed-off-by: Uilian Ries <[email protected]>
|
Python
|
apache-2.0
|
hanickadot/compile-time-regular-expressions,hanickadot/syntax-parser,hanickadot/compile-time-regular-expressions,hanickadot/compile-time-regular-expressions
|
from conans import ConanFile
class CtreConan(ConanFile):
name = "CTRE"
version = "2.0"
license = "MIT"
- url = "https://github.com/hanickadot/compile-time-regular-expressions.git"
+ url = "https://github.com/hanickadot/compile-time-regular-expressions"
author = "Hana Dusíková ([email protected])"
description = "Compile Time Regular Expression for C++17/20"
homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
- exports = "LICENSE"
- exports_sources = "include/*"
no_copy_source = True
+ scm = {
+ "type": "git",
+ "url": "auto",
+ "revision": "auto"
+ }
def package(self):
self.copy("LICENSE", "licenses")
self.copy("*.hpp")
def package_id(self):
self.info.header_only()
|
Use SCM feature to build Conan package
|
## Code Before:
from conans import ConanFile
class CtreConan(ConanFile):
name = "CTRE"
version = "2.0"
license = "MIT"
url = "https://github.com/hanickadot/compile-time-regular-expressions.git"
author = "Hana Dusíková ([email protected])"
description = "Compile Time Regular Expression for C++17/20"
homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
exports = "LICENSE"
exports_sources = "include/*"
no_copy_source = True
def package(self):
self.copy("LICENSE", "licenses")
self.copy("*.hpp")
def package_id(self):
self.info.header_only()
## Instruction:
Use SCM feature to build Conan package
## Code After:
from conans import ConanFile
class CtreConan(ConanFile):
name = "CTRE"
version = "2.0"
license = "MIT"
url = "https://github.com/hanickadot/compile-time-regular-expressions"
author = "Hana Dusíková ([email protected])"
description = "Compile Time Regular Expression for C++17/20"
homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
no_copy_source = True
scm = {
"type": "git",
"url": "auto",
"revision": "auto"
}
def package(self):
self.copy("LICENSE", "licenses")
self.copy("*.hpp")
def package_id(self):
self.info.header_only()
|
# ... existing code ...
license = "MIT"
url = "https://github.com/hanickadot/compile-time-regular-expressions"
author = "Hana Dusíková ([email protected])"
# ... modified code ...
homepage = "https://github.com/hanickadot/compile-time-regular-expressions"
no_copy_source = True
scm = {
"type": "git",
"url": "auto",
"revision": "auto"
}
# ... rest of the code ...
|
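With url and revision set to "auto", Conan captures both from the local git checkout when the recipe is exported. Illustratively (using this record's own repository and commit, purely as an example), the exported recipe would carry concrete values along these lines:
scm = {
    "type": "git",
    "url": "https://github.com/hanickadot/compile-time-regular-expressions.git",
    "revision": "ca348485b0acf2dbc41f2b4e52140d69e318327c",
}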
109018326b317a160e0ba555b23b7b4401f44ed3
|
website/views.py
|
website/views.py
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
number_of_news = 3
# Sorts the news to show the events nearest in future and then fill in with the newest articles
event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
news_list = list(chain(event_list, article_list))
try:
door_status = DoorStatus.objects.get(name='hackerspace').status
except DoorStatus.DoesNotExist:
door_status = True
context = {
'news_list': news_list,
'door_status': door_status,
}
return render(request, 'index.html', context)
def opptak(request):
return HttpResponseRedirect(reverse('application_form'))
def test404(request):
return render(request, '404.html')
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
number_of_news = 3
# Sorts the news to show the events nearest in future and then fill in with the newest articles
event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
news_list = list(chain(event_list, article_list))
try:
door_status = DoorStatus.objects.get(name='hackerspace').status
except DoorStatus.DoesNotExist:
door_status = True
context = {
'news_list': news_list,
'door_status': door_status,
}
return render(request, 'index.html', context)
def opptak(request):
return HttpResponseRedirect(reverse('article', args=[6]))
def test404(request):
return render(request, '404.html')
|
Change redirect to application redirect
|
Change redirect to application redirect
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
number_of_news = 3
# Sorts the news to show the events nearest in future and then fill in with the newest articles
event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
news_list = list(chain(event_list, article_list))
try:
door_status = DoorStatus.objects.get(name='hackerspace').status
except DoorStatus.DoesNotExist:
door_status = True
context = {
'news_list': news_list,
'door_status': door_status,
}
return render(request, 'index.html', context)
def opptak(request):
- return HttpResponseRedirect(reverse('application_form'))
+ return HttpResponseRedirect(reverse('article', args=[6]))
def test404(request):
return render(request, '404.html')
|
Change redirect to application redirect
|
## Code Before:
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
number_of_news = 3
# Sorts the news to show the events nearest in future and then fill in with the newest articles
event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
news_list = list(chain(event_list, article_list))
try:
door_status = DoorStatus.objects.get(name='hackerspace').status
except DoorStatus.DoesNotExist:
door_status = True
context = {
'news_list': news_list,
'door_status': door_status,
}
return render(request, 'index.html', context)
def opptak(request):
return HttpResponseRedirect(reverse('application_form'))
def test404(request):
return render(request, '404.html')
## Instruction:
Change redirect to application redirect
## Code After:
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
number_of_news = 3
# Sorts the news to show the events nearest in future and then fill in with the newest articles
event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
news_list = list(chain(event_list, article_list))
try:
door_status = DoorStatus.objects.get(name='hackerspace').status
except DoorStatus.DoesNotExist:
door_status = True
context = {
'news_list': news_list,
'door_status': door_status,
}
return render(request, 'index.html', context)
def opptak(request):
return HttpResponseRedirect(reverse('article', args=[6]))
def test404(request):
return render(request, '404.html')
|
# ... existing code ...
def opptak(request):
return HttpResponseRedirect(reverse('article', args=[6]))
# ... rest of the code ...
|
2de494810b73dd69c6b4bb87e87007291309d573
|
lightstep/util.py
|
lightstep/util.py
|
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(time.time() * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
|
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
|
Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.
|
Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.
|
Python
|
mit
|
lightstephq/lightstep-tracer-python
|
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
- return long(round(time.time() * constants.SECONDS_TO_MICRO))
+ return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
|
Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.
|
## Code Before:
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(time.time() * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
## Instruction:
Fix _time_to_micros bug. It was calling time.time() when it should use its own argument.
## Code After:
import random
import time
from . import constants
def _service_url_from_hostport(secure, host, port):
"""
Create an appropriate service URL given the parameters.
`secure` should be a bool.
"""
if secure:
protocol = 'https://'
else:
protocol = 'http://'
return ''.join([protocol, host, ':', str(port), '/_rpc/v1/reports/binary'])
def _generate_guid():
"""
Construct a guid - random 64 bit integer converted to a string.
"""
return str(random.getrandbits(64))
def _now_micros():
"""
Get the current time in microseconds since the epoch.
"""
return _time_to_micros(time.time())
def _time_to_micros(t):
"""
Convert a time.time()-style timestamp to microseconds.
"""
return long(round(t * constants.SECONDS_TO_MICRO))
def _merge_dicts(*dict_args):
"""Destructively merges dictionaries, returns None instead of an empty dictionary.
Elements of dict_args can be None.
Keys in latter dicts override those in earlier ones.
"""
result = {}
for dictionary in dict_args:
if dictionary:
result.update(dictionary)
return result if result else None
|
// ... existing code ...
"""
return long(round(t * constants.SECONDS_TO_MICRO))
// ... rest of the code ...
|
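The fix is easy to verify in isolation. A sketch with the constant inlined and long swapped for int, since long only exists on Python 2:
SECONDS_TO_MICRO = 1000000  # stands in for constants.SECONDS_TO_MICRO
def time_to_micros(t):
    # Fixed version: converts the argument, not the current time.
    return int(round(t * SECONDS_TO_MICRO))
assert time_to_micros(0) == 0        # the buggy version returned "now" here
assert time_to_micros(1.5) == 1500000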
8b33e63ab84e2da2168259d8ce17c8afac964500
|
cacheops/management/commands/cleanfilecache.py
|
cacheops/management/commands/cleanfilecache.py
|
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def handle(self, **options):
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % settings.FILE_CACHE_DIR)
|
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def add_arguments(self, parser):
parser.add_argument('path', nargs='*', default=['default'])
def handle(self, **options):
for path in options['path']:
if path == 'default':
path = settings.FILE_CACHE_DIR
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
|
Allow cleaning file cache in non-default place
|
Allow cleaning file cache in non-default place
|
Python
|
bsd-3-clause
|
LPgenerator/django-cacheops,Suor/django-cacheops
|
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
+ def add_arguments(self, parser):
+ parser.add_argument('path', nargs='*', default=['default'])
+
def handle(self, **options):
+ for path in options['path']:
+ if path == 'default':
+ path = settings.FILE_CACHE_DIR
- os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % settings.FILE_CACHE_DIR)
+ os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
|
Allow cleaning file cache in non-default place
|
## Code Before:
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def handle(self, **options):
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % settings.FILE_CACHE_DIR)
## Instruction:
Allow cleaning file cache in non-default place
## Code After:
import os
from django.core.management.base import BaseCommand
from cacheops.conf import settings
class Command(BaseCommand):
help = 'Clean filebased cache'
def add_arguments(self, parser):
parser.add_argument('path', nargs='*', default=['default'])
def handle(self, **options):
for path in options['path']:
if path == 'default':
path = settings.FILE_CACHE_DIR
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
|
// ... existing code ...
def add_arguments(self, parser):
parser.add_argument('path', nargs='*', default=['default'])
def handle(self, **options):
for path in options['path']:
if path == 'default':
path = settings.FILE_CACHE_DIR
os.system('find %s -type f \! -iname "\." -mmin +0 -delete' % path)
// ... rest of the code ...
|
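The argparse behaviour relied on here: a positional argument with nargs='*' falls back to its default when no values are supplied. A self-contained check:
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('path', nargs='*', default=['default'])
assert parser.parse_args([]).path == ['default']
assert parser.parse_args(['/tmp/a', '/tmp/b']).path == ['/tmp/a', '/tmp/b']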
018f8e7c7c69eefeb121c8552eb319b4b550f251
|
backslash/error_container.py
|
backslash/error_container.py
|
from sentinels import NOTHING
class ErrorContainer(object):
def add_error(self, exception, exception_type, traceback, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'exception': exception,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
|
from sentinels import NOTHING
class ErrorContainer(object):
def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'message': message,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
|
Unify errors and failures in API
|
Unify errors and failures in API
|
Python
|
bsd-3-clause
|
vmalloc/backslash-python,slash-testing/backslash-python
|
from sentinels import NOTHING
class ErrorContainer(object):
- def add_error(self, exception, exception_type, traceback, timestamp=NOTHING):
+ def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
- 'exception': exception,
+ 'message': message,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
|
Unify errors and failures in API
|
## Code Before:
from sentinels import NOTHING
class ErrorContainer(object):
def add_error(self, exception, exception_type, traceback, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'exception': exception,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
## Instruction:
Unify errors and failures in API
## Code After:
from sentinels import NOTHING
class ErrorContainer(object):
def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'message': message,
'exception_type': exception_type,
'traceback': traceback,
'timestamp': timestamp
})
def _get_id_key(self):
if type(self).__name__ == 'Test':
return 'test_id'
return 'session_id'
|
...
def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING):
return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
'message': message,
'exception_type': exception_type,
...
|
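The NOTHING default imported from sentinels lets the API tell an omitted argument apart from an explicit None; a self-contained sketch of that pattern (the filtering step is an assumption about the API layer, not backslash code):

NOTHING = object()  # stand-in for sentinels.NOTHING

def add_error(message, exception_type=NOTHING, traceback=NOTHING):
    params = {'message': message,
              'exception_type': exception_type,
              'traceback': traceback}
    # Drop fields the caller omitted; an explicit None survives.
    return {k: v for k, v in params.items() if v is not NOTHING}

print(add_error('boom'))                       # {'message': 'boom'}
print(add_error('boom', exception_type=None))  # explicit None is kept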
f7d3fa716cd73c5a066aa0e40c337b50880befea
|
lc005_longest_palindromic_substring.py
|
lc005_longest_palindromic_substring.py
|
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
|
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
Complete naive longest palindromic substring
|
Complete naive longest palindromic substring
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
- class Solution(object):
+ class SolutionNaive(object):
- def longestPalindrome(self, s):
+ def longestPalindrome(self, s):
- """
- :type s: str
+ """
+ :type s: str
:rtype: str
- """
- pass
+
+ Time limit exceeded.
+ """
+ n = len(s)
+ max_len = 1
+ max_i = 0
+ max_j = 0
+
+ for i in range(n):
+ for j in range(i + 1, n):
+ if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
+ ij_len = j - i + 1
+ if ij_len > max_len:
+ max_len = ij_len
+ max_i = i
+ max_j = j
+ else:
+ break
+ return s[max_i:(max_j + 1)]
+
+
+ class SolutionDP(object):
+ def longestPalindrome(self, s):
+ """
+ :type s: str
+ :rtype: str
+ """
+ pass
def main():
- s = 'babad'
+ s = 'babad' # Ans: bab.
- print(Solution().longestPalindrome(s))
+ print(SolutionNaive().longestPalindrome(s))
- s = 'cbbd'
+ s = 'cbbd' # Ans: bb.
- print(Solution().longestPalindrome(s))
+ print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
- main()
+ main()
|
Complete naive longest palindromic substring
|
## Code Before:
class Solution(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad'
print(Solution().longestPalindrome(s))
s = 'cbbd'
print(Solution().longestPalindrome(s))
if __name__ == '__main__':
main()
## Instruction:
Complete naive longest palindromic substring
## Code After:
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
if __name__ == '__main__':
main()
|
// ... existing code ...
class SolutionNaive(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
Time limit exceeded.
"""
n = len(s)
max_len = 1
max_i = 0
max_j = 0
for i in range(n):
for j in range(i + 1, n):
if s[i:(j + 1)] == s[i:(j + 1)][::-1]:
ij_len = j - i + 1
if ij_len > max_len:
max_len = ij_len
max_i = i
max_j = j
else:
break
return s[max_i:(max_j + 1)]
class SolutionDP(object):
def longestPalindrome(self, s):
"""
:type s: str
:rtype: str
"""
pass
// ... modified code ...
def main():
s = 'babad' # Ans: bab.
print(SolutionNaive().longestPalindrome(s))
s = 'cbbd' # Ans: bb.
print(SolutionNaive().longestPalindrome(s))
...
if __name__ == '__main__':
main()
// ... rest of the code ...
|
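SolutionDP above is left as a stub; one common O(n^2) improvement over the naive scan is expand-around-center. A sketch of that approach, offered as an alternative rather than the author's eventual DP code:

class SolutionExpand(object):
    def longestPalindrome(self, s):
        """Expand around each odd/even center; O(n^2) time, O(1) space."""
        if not s:
            return ''
        best = s[0]
        for i in range(len(s)):
            for left, right in ((i, i), (i, i + 1)):
                while left >= 0 and right < len(s) and s[left] == s[right]:
                    left -= 1
                    right += 1
                if right - left - 1 > len(best):
                    best = s[left + 1:right]
        return best

print(SolutionExpand().longestPalindrome('babad'))  # bab
print(SolutionExpand().longestPalindrome('cbbd'))   # bb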
5227ef25d9944c5e33b4a4f7e58259e3646ae52a
|
interactive.py
|
interactive.py
|
import pyRecipeBook
import FoodGroups
#Welcome screen
welcomeMessage = "Welcome to pyRecipeBook!\n"
welcomeMessage += "Enter a command below:\n"
print(welcomeMessage)
#
pre = '# '
on = True
#Keep asking for input
while(on):
command = input(pre)
#Run command
#Exiting commands
"Thank you for choosing to use pyRecipeBook"
|
import pyRecipeBook
import FoodGroups
#Welcome screen
welcomeMessage = "Welcome to pyRecipeBook!\n"
welcomeMessage += "Enter a command below:\n"
print(welcomeMessage)
#Method to run commands
def runCommand(command):
if command.strip() == 'exit':
return False
else:
return True
#
pre = '# '
on = True
#Keep asking for input
while(on):
#Enter a command
command = raw_input(pre)
#Run command
on = runCommand(command)
#Exiting commands
exitMessage = "\nThank you for choosing to use pyRecipeBook!\n"
print(exitMessage)
|
Update interactive.py - Add a method to run predefined commands.
|
Update interactive.py
- Add a method to run predefined commands.
|
Python
|
mit
|
VictorLoren/pyRecipeBook
|
import pyRecipeBook
import FoodGroups
#Welcome screen
welcomeMessage = "Welcome to pyRecipeBook!\n"
welcomeMessage += "Enter a command below:\n"
print(welcomeMessage)
+ #Method to run commands
+ def runCommand(command):
+ if command.strip() == 'exit':
+ return False
+ else:
+ return True
#
pre = '# '
on = True
#Keep asking for input
while(on):
+ #Enter a command
- command = input(pre)
+ command = raw_input(pre)
#Run command
+ on = runCommand(command)
- #Exiting commands
+ #Exiting commands
- "Thank you for choosing to use pyRecipeBook"
+ exitMessage = "\nThank you for choosing to use pyRecipeBook!\n"
+ print(exitMessage)
|
Update interactive.py - Add a method to run predefined commands.
|
## Code Before:
import pyRecipeBook
import FoodGroups
#Welcome screen
welcomeMessage = "Welcome to pyRecipeBook!\n"
welcomeMessage += "Enter a command below:\n"
print(welcomeMessage)
#
pre = '# '
on = True
#Keep asking for input
while(on):
command = input(pre)
#Run command
#Exiting commands
"Thank you for choosing to use pyRecipeBook"
## Instruction:
Update interactive.py - Add a method to run predefined commands.
## Code After:
import pyRecipeBook
import FoodGroups
#Welcome screen
welcomeMessage = "Welcome to pyRecipeBook!\n"
welcomeMessage += "Enter a command below:\n"
print(welcomeMessage)
#Method to run commands
def runCommand(command):
if command.strip() == 'exit':
return False
else:
return True
#
pre = '# '
on = True
#Keep asking for input
while(on):
#Enter a command
command = raw_input(pre)
#Run command
on = runCommand(command)
#Exiting commands
exitMessage = "\nThank you for choosing to use pyRecipeBook!\n"
print(exitMessage)
|
...
#Method to run commands
def runCommand(command):
if command.strip() == 'exit':
return False
else:
return True
...
while(on):
#Enter a command
command = raw_input(pre)
#Run command
on = runCommand(command)
#Exiting commands
exitMessage = "\nThank you for choosing to use pyRecipeBook!\n"
print(exitMessage)
...
|
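raw_input() pins this script to Python 2 (Python 3 renamed it to input()); if more commands are added later, a dispatch table keeps runCommand flat. A Python 3 sketch with illustrative command names:

COMMANDS = {
    'exit': lambda: False,  # stop the loop
    'help': lambda: print('commands: exit, help') or True,
}

def run_command(command):
    action = COMMANDS.get(command.strip())
    return action() if action else True  # unknown input keeps looping

print(run_command('exit'))  # False
print(run_command('???'))   # True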
1b95969110f97af397cb3314b59c30679911da48
|
scripts/scrape-cdc-state-case-counts.py
|
scripts/scrape-cdc-state-case-counts.py
|
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
Update CDC scraper to handle new format
|
Update CDC scraper to handle new format
|
Python
|
mit
|
BuzzFeedNews/zika-data
|
import requests
import lxml.html
import pandas as pd
+ import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
+
+ paren_pat = re.compile(r"\([^\)]+\)")
+
+ def parse_cell(text):
+ return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
- cells = [ [ td.text_content().strip()
+ cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
Update CDC scraper to handle new format
|
## Code Before:
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
## Instruction:
Update CDC scraper to handle new format
## Code After:
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
# ... existing code ...
import pandas as pd
import re
import sys
# ... modified code ...
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
...
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
# ... rest of the code ...
|
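The new parse_cell() strips parenthesized annotations so the integer cast still succeeds; the sample cells below are illustrative of the reformatted CDC table, not actual scraped values:

import re

paren_pat = re.compile(r"\([^\)]+\)")

def parse_cell(text):
    return re.sub(paren_pat, "", text).strip()

print(parse_cell("Texas"))                           # Texas
print(parse_cell("27 (includes 2 new cases)"))       # 27
print(int(parse_cell("27 (includes 2 new cases)")))  # cast is safe now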
d44010acc32fcb78570cd34478d0f4e8f1cfa979
|
utility/dbproc.py
|
utility/dbproc.py
|
from discord.ext import commands
from utils import *
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from member import Base, Member
import discord
import asyncio
class Baydb:
engine = create_engine('sqlite:///bayohwoolph.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
conn = engine.connect()
|
from discord.ext import commands
from utils import *
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from member import Base, Member
from config import Config
import discord
import asyncio
class Baydb:
engine = create_engine(Config.MAIN['dbpath'])
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
conn = engine.connect()
|
Move another usage of DB into ini file thing.
|
Move another usage of DB into ini file thing.
|
Python
|
agpl-3.0
|
dark-echo/Bay-Oh-Woolph,freiheit/Bay-Oh-Woolph
|
from discord.ext import commands
from utils import *
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from member import Base, Member
+ from config import Config
import discord
import asyncio
class Baydb:
- engine = create_engine('sqlite:///bayohwoolph.db')
+ engine = create_engine(Config.MAIN['dbpath'])
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
conn = engine.connect()
|
Move another usage of DB into ini file thing.
|
## Code Before:
from discord.ext import commands
from utils import *
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from member import Base, Member
import discord
import asyncio
class Baydb:
engine = create_engine('sqlite:///bayohwoolph.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
conn = engine.connect()
## Instruction:
Move another usage of DB into ini file thing.
## Code After:
from discord.ext import commands
from utils import *
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from member import Base, Member
from config import Config
import discord
import asyncio
class Baydb:
engine = create_engine(Config.MAIN['dbpath'])
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
conn = engine.connect()
|
# ... existing code ...
from member import Base, Member
from config import Config
import discord
# ... modified code ...
engine = create_engine(Config.MAIN['dbpath'])
Base.metadata.bind = engine
# ... rest of the code ...
|
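The Config class is not part of this diff; a plausible minimal way for Config.MAIN['dbpath'] to be backed by an ini file is configparser (an assumption about the bot's config layer, not its actual code):

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[MAIN]
dbpath = sqlite:///bayohwoolph.db
""")
print(cfg['MAIN']['dbpath'])  # sqlite:///bayohwoolph.db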
2141e4fd2b09d3a8a95e032fb02eafb9e6f818c9
|
i3pystatus/shell.py
|
i3pystatus/shell.py
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
Add exception handling for output
|
Add exception handling for output
|
Python
|
mit
|
opatut/i3pystatus,teto/i3pystatus,schroeji/i3pystatus,ncoop/i3pystatus,juliushaertl/i3pystatus,m45t3r/i3pystatus,richese/i3pystatus,claria/i3pystatus,ncoop/i3pystatus,paulollivier/i3pystatus,paulollivier/i3pystatus,ismaelpuerto/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,fmarchenko/i3pystatus,plumps/i3pystatus,onkelpit/i3pystatus,yang-ling/i3pystatus,yang-ling/i3pystatus,MaicoTimmerman/i3pystatus,richese/i3pystatus,plumps/i3pystatus,onkelpit/i3pystatus,schroeji/i3pystatus,juliushaertl/i3pystatus,drwahl/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,drwahl/i3pystatus,facetoe/i3pystatus,Arvedui/i3pystatus,m45t3r/i3pystatus,Arvedui/i3pystatus,Elder-of-Ozone/i3pystatus,fmarchenko/i3pystatus,enkore/i3pystatus,opatut/i3pystatus,facetoe/i3pystatus,claria/i3pystatus,Elder-of-Ozone/i3pystatus,enkore/i3pystatus,MaicoTimmerman/i3pystatus,ismaelpuerto/i3pystatus,teto/i3pystatus
|
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
-
+ try:
- if out[-1] == " ":
+ if out[-1] == " ":
- out = out[:-1]
+ out = out[:-1]
+ except:
+ out = ""
self.output = {
"full_text": out,
"color": color
}
|
Add exception handling for output
|
## Code Before:
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
if out[-1] == " ":
out = out[:-1]
self.output = {
"full_text": out,
"color": color
}
## Instruction:
Add exception handling for output
## Code After:
from i3pystatus import IntervalModule
from subprocess import check_output, CalledProcessError
class Shell(IntervalModule):
"""
Shows output of shell command
"""
color = "#FFFFFF"
error_color = "#FF0000"
settings = (
("command", "command to be executed"),
("color", "standard color"),
("error_color", "color to use when non zero exit code is returned")
)
required = ("command",)
def run(self):
try:
out = check_output(self.command, shell=True)
color = self.color
except CalledProcessError as e:
out = e.output
color = self.error_color
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
self.output = {
"full_text": out,
"color": color
}
|
# ... existing code ...
out = out.decode("UTF-8").replace("\n", " ")
try:
if out[-1] == " ":
out = out[:-1]
except:
out = ""
# ... rest of the code ...
|
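The try/except guards the out[-1] lookup against empty command output; str.rstrip() gives the same protection without a bare except, at the cost of trimming all trailing whitespace rather than a single space:

out = 'uptime: 3 days '.rstrip()  # 'uptime: 3 days'
empty = ''.rstrip()               # '' -- no IndexError possible
print(repr(out), repr(empty))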
7d02bd555d7519d485d00e02136d26a6e4e7096e
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
Drop FK before dropping instance_id column.
|
Drop FK before dropping instance_id column.
|
Python
|
apache-2.0
|
sacharya/nova,jianghuaw/nova,leilihh/novaha,eneabio/nova,vladikr/nova_drafts,KarimAllah/nova,sileht/deb-openstack-nova,Stavitsky/nova,DirectXMan12/nova-hacking,akash1808/nova_test_latest,raildo/nova,gspilio/nova,tangfeixiong/nova,jianghuaw/nova,Juniper/nova,JioCloud/nova,zhimin711/nova,usc-isi/nova,orbitfp7/nova,JianyuWang/nova,vmturbo/nova,sebrandon1/nova,jeffrey4l/nova,Francis-Liu/animated-broccoli,psiwczak/openstack,MountainWei/nova,tianweizhang/nova,yrobla/nova,maelnor/nova,whitepages/nova,maoy/zknova,joker946/nova,russellb/nova,iuliat/nova,qwefi/nova,rahulunair/nova,berrange/nova,sileht/deb-openstack-nova,mahak/nova,fnordahl/nova,sridevikoushik31/openstack,Metaswitch/calico-nova,gooddata/openstack-nova,sebrandon1/nova,redhat-openstack/nova,eayunstack/nova,mandeepdhami/nova,tealover/nova,eharney/nova,yrobla/nova,CEG-FYP-OpenStack/scheduler,TieWei/nova,maelnor/nova,TwinkleChawla/nova,KarimAllah/nova,cloudbau/nova,isyippee/nova,mikalstill/nova,hanlind/nova,mgagne/nova,badock/nova,qwefi/nova,paulmathews/nova,kimjaejoong/nova,spring-week-topos/nova-week,plumgrid/plumgrid-nova,alaski/nova,petrutlucian94/nova,thomasem/nova,barnsnake351/nova,cernops/nova,akash1808/nova,Triv90/Nova,yrobla/nova,watonyweng/nova,akash1808/nova_test_latest,NoBodyCam/TftpPxeBootBareMetal,Tehsmash/nova,Juniper/nova,iuliat/nova,orbitfp7/nova,alexandrucoman/vbox-nova-driver,aristanetworks/arista-ovs-nova,fnordahl/nova,cernops/nova,zaina/nova,projectcalico/calico-nova,russellb/nova,apporc/nova,j-carpentier/nova,shahar-stratoscale/nova,DirectXMan12/nova-hacking,tealover/nova,vmturbo/nova,rahulunair/nova,JianyuWang/nova,varunarya10/nova_test_latest,imsplitbit/nova,klmitch/nova,silenceli/nova,NewpTone/stacklab-nova,apporc/nova,devendermishrajio/nova_test_latest,dawnpower/nova,alvarolopez/nova,felixma/nova,saleemjaveds/https-github.com-openstack-nova,adelina-t/nova,angdraug/nova,mikalstill/nova,akash1808/nova,Yuriy-Leonov/nova,CiscoSystems/nova,klmitch/nova,watonyweng/nova,devoid/nova,bgxavier/nova,citrix-openstack-build/nova,psiwczak/openstack,nikesh-mahalka/nova,sridevikoushik31/nova,CiscoSystems/nova,joker946/nova,JioCloud/nova,salv-orlando/MyRepo,rrader/nova-docker-plugin,kimjaejoong/nova,rickerc/nova_audit,savi-dev/nova,sridevikoushik31/nova,hanlind/nova,DirectXMan12/nova-hacking,blueboxgroup/nova,JioCloud/nova_test_latest,eonpatapon/nova,luogangyi/bcec-nova,belmiromoreira/nova,fajoy/nova,rickerc/nova_audit,double12gzh/nova,sileht/deb-openstack-nova,cloudbase/nova,eayunstack/nova,NeCTAR-RC/nova,aristanetworks/arista-ovs-nova,CCI-MOC/nova,sridevikoushik31/openstack,silenceli/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,virtualopensystems/nova,Juniper/nova,devendermishrajio/nova,tudorvio/nova,edulramirez/nova,bgxavier/nova,cyx1231st/nova,shootstar/novatest,varunarya10/nova_test_latest,maheshp/novatest,cernops/nova,imsplitbit/nova,maheshp/novatest,russellb/nova,josephsuh/extra-specs,mahak/nova,mgagne/nova,plumgrid/plumgrid-nova,gspilio/nova,sridevikoushik31/nova,luogangyi/bcec-nova,NoBodyCam/TftpPxeBootBareMetal,alaski/nova,cloudbau/nova,CloudServer/nova,bigswitch/nova,houshengbo/nova_vmware_compute_driver,dawnpower/nova,rajalokan/nova,belmiromoreira/nova,virtualopensystems/nova,saleemjaveds/https-github.com-openstack-nova,bclau/nova,eonpatapon/nova,Juniper/nova,citrix-openstack-build/nova,j-carpentier/nova,sacharya/nova,zhimin711/nova,Yusuke1987/openstack_template,angdraug/nova,mmnelemane/nova,eneabio/nova,cloudbase/nova,klmitch/nova,vmturbo/nova,openstack/nova,zaina/nova,edulramirez/nova,eharney/nova,josephsuh/extra-spec
s,cloudbase/nova,shail2810/nova,jianghuaw/nova,Triv90/Nova,NeCTAR-RC/nova,viggates/nova,zzicewind/nova,LoHChina/nova,vmturbo/nova,spring-week-topos/nova-week,noironetworks/nova,rajalokan/nova,openstack/nova,berrange/nova,takeshineshiro/nova,eneabio/nova,cloudbase/nova-virtualbox,felixma/nova,fajoy/nova,whitepages/nova,usc-isi/extra-specs,psiwczak/openstack,ruslanloman/nova,isyippee/nova,ruslanloman/nova,petrutlucian94/nova_dev,dstroppa/openstack-smartos-nova-grizzly,shahar-stratoscale/nova,bclau/nova,josephsuh/extra-specs,SUSE-Cloud/nova,vladikr/nova_drafts,noironetworks/nova,fajoy/nova,ntt-sic/nova,maoy/zknova,Francis-Liu/animated-broccoli,BeyondTheClouds/nova,blueboxgroup/nova,LoHChina/nova,cloudbase/nova-virtualbox,Triv90/Nova,jianghuaw/nova,SUSE-Cloud/nova,leilihh/novaha,devoid/nova,salv-orlando/MyRepo,Yuriy-Leonov/nova,jeffrey4l/nova,NewpTone/stacklab-nova,tangfeixiong/nova,zzicewind/nova,houshengbo/nova_vmware_compute_driver,yosshy/nova,BeyondTheClouds/nova,sridevikoushik31/openstack,aristanetworks/arista-ovs-nova,maheshp/novatest,OpenAcademy-OpenStack/nova-scheduler,mandeepdhami/nova,phenoxim/nova,paulmathews/nova,usc-isi/nova,TwinkleChawla/nova,mikalstill/nova,Metaswitch/calico-nova,ntt-sic/nova,KarimAllah/nova,houshengbo/nova_vmware_compute_driver,projectcalico/calico-nova,CloudServer/nova,savi-dev/nova,usc-isi/extra-specs,tanglei528/nova,yatinkumbhare/openstack-nova,tianweizhang/nova,Stavitsky/nova,gooddata/openstack-nova,redhat-openstack/nova,Yusuke1987/openstack_template,sridevikoushik31/nova,yatinkumbhare/openstack-nova,tanglei528/nova,leilihh/nova,dstroppa/openstack-smartos-nova-grizzly,klmitch/nova,mahak/nova,sebrandon1/nova,bigswitch/nova,rajalokan/nova,rrader/nova-docker-plugin,leilihh/nova,raildo/nova,mmnelemane/nova,ewindisch/nova,dstroppa/openstack-smartos-nova-grizzly,gooddata/openstack-nova,Tehsmash/nova,JioCloud/nova_test_latest,petrutlucian94/nova_dev,ted-gould/nova,rahulunair/nova,dims/nova,badock/nova,yosshy/nova,MountainWei/nova,scripnichenko/nova,double12gzh/nova,gspilio/nova,OpenAcademy-OpenStack/nova-scheduler,nikesh-mahalka/nova,shootstar/novatest,savi-dev/nova,scripnichenko/nova,gooddata/openstack-nova,devendermishrajio/nova_test_latest,usc-isi/extra-specs,CCI-MOC/nova,takeshineshiro/nova,usc-isi/nova,NewpTone/stacklab-nova,dims/nova,adelina-t/nova,phenoxim/nova,shail2810/nova,cyx1231st/nova,alvarolopez/nova,alexandrucoman/vbox-nova-driver,barnsnake351/nova,openstack/nova,affo/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,affo/nova,maoy/zknova,thomasem/nova,ted-gould/nova,petrutlucian94/nova,viggates/nova,CEG-FYP-OpenStack/scheduler,TieWei/nova,salv-orlando/MyRepo,hanlind/nova,rajalokan/nova,NoBodyCam/TftpPxeBootBareMetal,ewindisch/nova,BeyondTheClouds/nova,devendermishrajio/nova,paulmathews/nova,tudorvio/nova
|
from sqlalchemy import Column, Integer, String, MetaData, Table
+
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
+
+ if migrate_engine.name == "mysql":
+ migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
+ "`migrations_ibfk_1`;")
+
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
Drop FK before dropping instance_id column.
|
## Code Before:
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
## Instruction:
Drop FK before dropping instance_id column.
## Code After:
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
// ... existing code ...
from sqlalchemy import Column, Integer, String, MetaData, Table
// ... modified code ...
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
// ... rest of the code ...
|
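Hardcoding migrations_ibfk_1 relies on MySQL's auto-generated constraint naming; in modern SQLAlchemy the foreign key could instead be discovered by reflection. A sketch using the Inspector API, which postdates the sqlalchemy-migrate era this script targets:

from sqlalchemy import create_engine, inspect, text

engine = create_engine('sqlite://')  # in-memory stand-in for the demo
with engine.begin() as conn:
    conn.execute(text('CREATE TABLE instances (id INTEGER PRIMARY KEY)'))
    conn.execute(text('CREATE TABLE migrations ('
                      'id INTEGER PRIMARY KEY, '
                      'instance_id INTEGER REFERENCES instances(id))'))

for fk in inspect(engine).get_foreign_keys('migrations'):
    print(fk['referred_table'], fk['constrained_columns'])
# instances ['instance_id']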
185571932f518760bb3045347578caa98ef820f5
|
froide/foiidea/views.py
|
froide/foiidea/views.py
|
from django.shortcuts import render
from foiidea.models import Article
def index(request):
return render(request, 'foiidea/index.html', {
'object_list': Article.objects.all().select_related('public_bodies', 'foirequests')
})
|
from django.shortcuts import render
from foiidea.models import Article
def index(request):
return render(request, 'foiidea/index.html', {
'object_list': Article.objects.get_ordered()
})
|
Use manager method for view articles
|
Use manager method for view articles
|
Python
|
mit
|
fin/froide,LilithWittmann/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,ryankanno/froide,LilithWittmann/froide,stefanw/froide,okfse/froide,okfse/froide,ryankanno/froide,ryankanno/froide,ryankanno/froide,LilithWittmann/froide,stefanw/froide,fin/froide,CodeforHawaii/froide,catcosmo/froide,CodeforHawaii/froide,okfse/froide,LilithWittmann/froide,catcosmo/froide,stefanw/froide,LilithWittmann/froide,ryankanno/froide,catcosmo/froide,okfse/froide,fin/froide,catcosmo/froide,fin/froide,stefanw/froide,okfse/froide,CodeforHawaii/froide,catcosmo/froide
|
from django.shortcuts import render
from foiidea.models import Article
def index(request):
return render(request, 'foiidea/index.html', {
- 'object_list': Article.objects.all().select_related('public_bodies', 'foirequests')
+ 'object_list': Article.objects.get_ordered()
})
|
Use manager method for view articles
|
## Code Before:
from django.shortcuts import render
from foiidea.models import Article
def index(request):
return render(request, 'foiidea/index.html', {
'object_list': Article.objects.all().select_related('public_bodies', 'foirequests')
})
## Instruction:
Use manager method for view articles
## Code After:
from django.shortcuts import render
from foiidea.models import Article
def index(request):
return render(request, 'foiidea/index.html', {
'object_list': Article.objects.get_ordered()
})
|
# ... existing code ...
return render(request, 'foiidea/index.html', {
'object_list': Article.objects.get_ordered()
})
# ... rest of the code ...
|
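get_ordered() itself is defined on Article's manager and is not shown in this diff; a hypothetical manager of that shape (field names assumed, and it must live in an app's models.py inside a configured Django project) might look like:

from django.db import models

class ArticleManager(models.Manager):
    def get_ordered(self):
        # Assumed ordering and prefetching; not froide's actual query.
        return (self.get_queryset()
                    .prefetch_related('public_bodies', 'foirequests')
                    .order_by('-date'))

class Article(models.Model):
    date = models.DateTimeField()
    objects = ArticleManager()

    class Meta:
        app_label = 'foiidea'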
a25b03f83c7003ccea2eb554117e8fedc153e4fe
|
corgi/coerce.py
|
corgi/coerce.py
|
def listify(obj):
if not isinstance(obj, list):
return [obj]
return obj
def dictify(obj, key):
if isinstance(obj, dict):
return obj
return {key: obj}
|
def listify(obj):
if not isinstance(obj, list):
return [obj]
return obj
def dictify(obj, key):
if not isinstance(obj, dict):
return {key: obj}
return obj
|
Make dictify similar in flow to listify
|
Make dictify similar in flow to listify
|
Python
|
mit
|
log0ymxm/corgi
|
def listify(obj):
if not isinstance(obj, list):
return [obj]
return obj
def dictify(obj, key):
- if isinstance(obj, dict):
+ if not isinstance(obj, dict):
- return obj
- return {key: obj}
+ return {key: obj}
+ return obj
|
Make dictify similar in flow to listify
|
## Code Before:
def listify(obj):
if not isinstance(obj, list):
return [obj]
return obj
def dictify(obj, key):
if isinstance(obj, dict):
return obj
return {key: obj}
## Instruction:
Make dictify similar in flow to listify
## Code After:
def listify(obj):
if not isinstance(obj, list):
return [obj]
return obj
def dictify(obj, key):
if not isinstance(obj, dict):
return {key: obj}
return obj
|
...
def dictify(obj, key):
if not isinstance(obj, dict):
return {key: obj}
return obj
...
|
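A quick behavior check showing the symmetry the rewrite gives the two coercers (definitions repeated so the snippet stands alone):

def listify(obj):
    return obj if isinstance(obj, list) else [obj]

def dictify(obj, key):
    return obj if isinstance(obj, dict) else {key: obj}

print(listify('a'), listify(['a']))               # ['a'] ['a']
print(dictify('a', 'k'), dictify({'k': 1}, 'x'))  # {'k': 'a'} {'k': 1}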
6729515de02ce0678793ffb8faf280e65a4376e2
|
run.py
|
run.py
|
import sys
from core import KDPVGenerator
def print_help():
print('Usage: python run.py [data.yml]')
def generate(filename):
generator = KDPVGenerator.from_yml(filename)
generator.generate()
def main():
if len(sys.argv) < 2:
filename = 'data.yml'
else:
filename = sys.argv[1]
if filename in {'help', '-h', '--help'}:
print_help()
else:
generate(filename)
if __name__ == '__main__':
main()
|
import argparse
import os
from core import KDPVGenerator
def generate(filename):
generator = KDPVGenerator.from_yml(filename)
generator.generate()
def main():
parser = argparse.ArgumentParser(description='KDPV Generator')
parser.add_argument('filename', nargs='?', default='data.yml', help='data file (default: data.yml)')
args = parser.parse_args()
if not args.filename:
parser.print_help()
else:
if not os.path.isfile(args.filename):
exit('Unable to open file: {}'.format(args.filename))
generate(args.filename)
if __name__ == '__main__':
main()
|
Add argparse, handle data file missing
|
Add argparse, handle data file missing
|
Python
|
mit
|
spbpython/kdpv_generator
|
+ import argparse
- import sys
+ import os
-
from core import KDPVGenerator
-
-
- def print_help():
- print('Usage: python run.py [data.yml]')
def generate(filename):
generator = KDPVGenerator.from_yml(filename)
generator.generate()
def main():
- if len(sys.argv) < 2:
- filename = 'data.yml'
-
+ parser = argparse.ArgumentParser(description='KDPV Generator')
+ parser.add_argument('filename', nargs='?', default='data.yml', help='data file (default: data.yml)')
+ args = parser.parse_args()
+ if not args.filename:
+ parser.print_help()
else:
+ if not os.path.isfile(args.filename):
+ exit('Unable to open file: {}'.format(args.filename))
- filename = sys.argv[1]
-
- if filename in {'help', '-h', '--help'}:
- print_help()
-
- else:
- generate(filename)
+ generate(args.filename)
if __name__ == '__main__':
main()
|
Add argparse, handle data file missing
|
## Code Before:
import sys
from core import KDPVGenerator
def print_help():
print('Usage: python run.py [data.yml]')
def generate(filename):
generator = KDPVGenerator.from_yml(filename)
generator.generate()
def main():
if len(sys.argv) < 2:
filename = 'data.yml'
else:
filename = sys.argv[1]
if filename in {'help', '-h', '--help'}:
print_help()
else:
generate(filename)
if __name__ == '__main__':
main()
## Instruction:
Add argparse, handle data file missing
## Code After:
import argparse
import os
from core import KDPVGenerator
def generate(filename):
generator = KDPVGenerator.from_yml(filename)
generator.generate()
def main():
parser = argparse.ArgumentParser(description='KDPV Generator')
parser.add_argument('filename', nargs='?', default='data.yml', help='data file (default: data.yml)')
args = parser.parse_args()
if not args.filename:
parser.print_help()
else:
if not os.path.isfile(args.filename):
exit('Unable to open file: {}'.format(args.filename))
generate(args.filename)
if __name__ == '__main__':
main()
|
// ... existing code ...
import argparse
import os
from core import KDPVGenerator
// ... modified code ...
def main():
parser = argparse.ArgumentParser(description='KDPV Generator')
parser.add_argument('filename', nargs='?', default='data.yml', help='data file (default: data.yml)')
args = parser.parse_args()
if not args.filename:
parser.print_help()
else:
if not os.path.isfile(args.filename):
exit('Unable to open file: {}'.format(args.filename))
generate(args.filename)
// ... rest of the code ...
|
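With nargs='?' and a non-empty default, parse_args() always yields a filename, so the 'if not args.filename' branch above can only fire for an explicitly empty argument; a quick check:

import argparse

parser = argparse.ArgumentParser(description='KDPV Generator')
parser.add_argument('filename', nargs='?', default='data.yml')

print(parser.parse_args([]).filename)             # data.yml
print(parser.parse_args(['other.yml']).filename)  # other.yml
print(parser.parse_args(['']).filename == '')     # True -> help branch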
29041cdaf3beca926f1dff1d3f147b7dc07ad8dd
|
pylp/cli/run.py
|
pylp/cli/run.py
|
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
Make pylpfile path more readable
|
Make pylpfile path more readable
|
Python
|
mit
|
pylp/pylp
|
import runpy, os, sys
import traceback
import asyncio
+ import pylp
- import pylp, pylp.cli.logger as logger
+ import pylp.cli.logger as logger
+ from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
+ readable_path = make_readable_path(path)
if not os.path.isfile(path):
- logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
+ logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
- logger.log("Using pylpfile ", logger.magenta(path))
+ logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
Make pylpfile path more readable
|
## Code Before:
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
## Instruction:
Make pylpfile path more readable
## Code After:
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
|
// ... existing code ...
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
// ... modified code ...
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
...
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
// ... rest of the code ...
|
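make_readable_path() lives in pylp.utils.paths and is not shown here; a plausible implementation (an assumption, not the actual source) abbreviates the user's home directory:

import os.path

def make_readable_path(path):
    home = os.path.expanduser('~')
    if path.startswith(home):
        path = '~' + path[len(home):]
    return path

print(make_readable_path(os.path.expanduser('~/project/pylpfile.py')))
# -> ~/project/pylpfile.py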
8fc6ba648347a48065ab2fb26f940dc92919feeb
|
bands/__init__.py
|
bands/__init__.py
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
Implement new python-based menu format
|
Implement new python-based menu format
|
Python
|
agpl-3.0
|
magfest/bands,magfest/bands
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
+ c.MENU['People'].append_menu_item(
+ MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
+ )
+
|
Implement new python-based menu format
|
## Code Before:
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
## Instruction:
Implement new python-based menu format
## Code After:
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
# ... existing code ...
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
# ... rest of the code ...
|
bb9116940ffba48a1a930e7c3203bd2d8b8bbb6e
|
docs/examples/compute/pricing.py
|
docs/examples/compute/pricing.py
|
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
EC2_ACCESS_ID = 'your access id'
EC2_SECRET_KEY = 'your secret key'
cls = get_driver(Provider.EC2)
driver = cls(EC2_ACCESS_ID, EC2_SECRET_KEY)
sizes = driver.list_sizes()
>>> sizes[:5]
[<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None price=0.02 driver=Amazon EC2 ...>,
<NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None price=0.065 driver=Amazon EC2 ...>,
<NodeSize: id=m1.medium, name=Medium Instance, ram=3700 disk=410 bandwidth=None price=0.13 driver=Amazon EC2 ...>,
<NodeSize: id=m1.large, name=Large Instance, ram=7680 disk=850 bandwidth=None price=0.26 driver=Amazon EC2 ...>,
<NodeSize: id=m1.xlarge, name=Extra Large Instance, ram=15360 disk=1690 bandwidth=None price=0.52 driver=Amazon EC2 ...>]
>>> sizes[0].price
0.02
>>>
|
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
EC2_ACCESS_ID = 'your access id'
EC2_SECRET_KEY = 'your secret key'
cls = get_driver(Provider.EC2)
driver = cls(EC2_ACCESS_ID, EC2_SECRET_KEY)
sizes = driver.list_sizes()
# >>> sizes[:2]
# [<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None
# price=0.02 driver=Amazon EC2 ...>,
# <NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None
# price=0.065 driver=Amazon EC2 ...>,
# >>> sizes[0].price
# 0.02
# >>>
|
Fix pep8 violations in the doc examples.
|
Fix pep8 violations in the doc examples.
|
Python
|
apache-2.0
|
t-tran/libcloud,illfelder/libcloud,ByteInternet/libcloud,mgogoulos/libcloud,Scalr/libcloud,apache/libcloud,erjohnso/libcloud,jimbobhickville/libcloud,erjohnso/libcloud,sahildua2305/libcloud,curoverse/libcloud,sfriesel/libcloud,wrigri/libcloud,Scalr/libcloud,supertom/libcloud,sfriesel/libcloud,StackPointCloud/libcloud,aleGpereira/libcloud,Itxaka/libcloud,Kami/libcloud,briancurtin/libcloud,thesquelched/libcloud,kater169/libcloud,mbrukman/libcloud,jimbobhickville/libcloud,sahildua2305/libcloud,mtekel/libcloud,curoverse/libcloud,wuyuewen/libcloud,mgogoulos/libcloud,smaffulli/libcloud,iPlantCollaborativeOpenSource/libcloud,sergiorua/libcloud,Cloud-Elasticity-Services/as-libcloud,samuelchong/libcloud,schaubl/libcloud,aviweit/libcloud,pantheon-systems/libcloud,mistio/libcloud,aviweit/libcloud,Verizon/libcloud,wuyuewen/libcloud,MrBasset/libcloud,pquentin/libcloud,thesquelched/libcloud,techhat/libcloud,ZuluPro/libcloud,kater169/libcloud,sergiorua/libcloud,SecurityCompass/libcloud,samuelchong/libcloud,briancurtin/libcloud,illfelder/libcloud,Cloud-Elasticity-Services/as-libcloud,watermelo/libcloud,atsaki/libcloud,pquentin/libcloud,SecurityCompass/libcloud,aleGpereira/libcloud,DimensionDataCBUSydney/libcloud,sfriesel/libcloud,sahildua2305/libcloud,Scalr/libcloud,wido/libcloud,kater169/libcloud,mathspace/libcloud,niteoweb/libcloud,watermelo/libcloud,apache/libcloud,techhat/libcloud,munkiat/libcloud,sgammon/libcloud,Verizon/libcloud,thesquelched/libcloud,JamesGuthrie/libcloud,andrewsomething/libcloud,mathspace/libcloud,JamesGuthrie/libcloud,pquentin/libcloud,supertom/libcloud,niteoweb/libcloud,cryptickp/libcloud,curoverse/libcloud,munkiat/libcloud,ZuluPro/libcloud,DimensionDataCBUSydney/libcloud,pantheon-systems/libcloud,apache/libcloud,JamesGuthrie/libcloud,ByteInternet/libcloud,sergiorua/libcloud,samuelchong/libcloud,lochiiconnectivity/libcloud,dcorbacho/libcloud,jerryblakley/libcloud,NexusIS/libcloud,Verizon/libcloud,mtekel/libcloud,watermelo/libcloud,jerryblakley/libcloud,StackPointCloud/libcloud,wuyuewen/libcloud,Itxaka/libcloud,smaffulli/libcloud,Kami/libcloud,Itxaka/libcloud,wrigri/libcloud,cryptickp/libcloud,ByteInternet/libcloud,vongazman/libcloud,supertom/libcloud,illfelder/libcloud,lochiiconnectivity/libcloud,wido/libcloud,vongazman/libcloud,mgogoulos/libcloud,iPlantCollaborativeOpenSource/libcloud,carletes/libcloud,ClusterHQ/libcloud,sgammon/libcloud,Cloud-Elasticity-Services/as-libcloud,marcinzaremba/libcloud,mbrukman/libcloud,andrewsomething/libcloud,t-tran/libcloud,andrewsomething/libcloud,ClusterHQ/libcloud,niteoweb/libcloud,erjohnso/libcloud,atsaki/libcloud,pantheon-systems/libcloud,cloudControl/libcloud,mistio/libcloud,NexusIS/libcloud,jimbobhickville/libcloud,marcinzaremba/libcloud,mbrukman/libcloud,aleGpereira/libcloud,lochiiconnectivity/libcloud,wido/libcloud,cryptickp/libcloud,wrigri/libcloud,atsaki/libcloud,cloudControl/libcloud,schaubl/libcloud,aviweit/libcloud,DimensionDataCBUSydney/libcloud,Kami/libcloud,SecurityCompass/libcloud,iPlantCollaborativeOpenSource/libcloud,cloudControl/libcloud,carletes/libcloud,dcorbacho/libcloud,NexusIS/libcloud,mistio/libcloud,carletes/libcloud,ZuluPro/libcloud,briancurtin/libcloud,mtekel/libcloud,munkiat/libcloud,MrBasset/libcloud,techhat/libcloud,MrBasset/libcloud,StackPointCloud/libcloud,schaubl/libcloud,vongazman/libcloud,jerryblakley/libcloud,t-tran/libcloud,marcinzaremba/libcloud,mathspace/libcloud,smaffulli/libcloud,dcorbacho/libcloud
|
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
EC2_ACCESS_ID = 'your access id'
EC2_SECRET_KEY = 'your secret key'
cls = get_driver(Provider.EC2)
driver = cls(EC2_ACCESS_ID, EC2_SECRET_KEY)
sizes = driver.list_sizes()
- >>> sizes[:5]
+ # >>> sizes[:2]
- [<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None price=0.02 driver=Amazon EC2 ...>,
+ # [<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None
+ # price=0.02 driver=Amazon EC2 ...>,
- <NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None price=0.065 driver=Amazon EC2 ...>,
+ # <NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None
+ # price=0.065 driver=Amazon EC2 ...>,
- <NodeSize: id=m1.medium, name=Medium Instance, ram=3700 disk=410 bandwidth=None price=0.13 driver=Amazon EC2 ...>,
- <NodeSize: id=m1.large, name=Large Instance, ram=7680 disk=850 bandwidth=None price=0.26 driver=Amazon EC2 ...>,
- <NodeSize: id=m1.xlarge, name=Extra Large Instance, ram=15360 disk=1690 bandwidth=None price=0.52 driver=Amazon EC2 ...>]
- >>> sizes[0].price
+ # >>> sizes[0].price
- 0.02
+ # 0.02
- >>>
+ # >>>
|
Fix pep8 violations in the doc examples.
|
## Code Before:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
EC2_ACCESS_ID = 'your access id'
EC2_SECRET_KEY = 'your secret key'
cls = get_driver(Provider.EC2)
driver = cls(EC2_ACCESS_ID, EC2_SECRET_KEY)
sizes = driver.list_sizes()
>>> sizes[:5]
[<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None price=0.02 driver=Amazon EC2 ...>,
<NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None price=0.065 driver=Amazon EC2 ...>,
<NodeSize: id=m1.medium, name=Medium Instance, ram=3700 disk=410 bandwidth=None price=0.13 driver=Amazon EC2 ...>,
<NodeSize: id=m1.large, name=Large Instance, ram=7680 disk=850 bandwidth=None price=0.26 driver=Amazon EC2 ...>,
<NodeSize: id=m1.xlarge, name=Extra Large Instance, ram=15360 disk=1690 bandwidth=None price=0.52 driver=Amazon EC2 ...>]
>>> sizes[0].price
0.02
>>>
## Instruction:
Fix pep8 violations in the doc examples.
## Code After:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
EC2_ACCESS_ID = 'your access id'
EC2_SECRET_KEY = 'your secret key'
cls = get_driver(Provider.EC2)
driver = cls(EC2_ACCESS_ID, EC2_SECRET_KEY)
sizes = driver.list_sizes()
# >>> sizes[:2]
# [<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None
# price=0.02 driver=Amazon EC2 ...>,
# <NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None
# price=0.065 driver=Amazon EC2 ...>,
# >>> sizes[0].price
# 0.02
# >>>
|
# ... existing code ...
# >>> sizes[:2]
# [<NodeSize: id=t1.micro, name=Micro Instance, ram=613 disk=15 bandwidth=None
# price=0.02 driver=Amazon EC2 ...>,
# <NodeSize: id=m1.small, name=Small Instance, ram=1740 disk=160 bandwidth=None
# price=0.065 driver=Amazon EC2 ...>,
# >>> sizes[0].price
# 0.02
# >>>
# ... rest of the code ...
|
1f3eb1c526171b0ee8d2cab05e182c067bfb6c2e
|
tests/unit/modules/defaults_test.py
|
tests/unit/modules/defaults_test.py
|
'''
:codeauthor: :email:`Jayesh Kariya <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
# 'get' function tests: 1
def test_get(self):
'''
Test if it executes a defaults client run and returns a dict
'''
mock = MagicMock(return_value='')
with patch.dict(defaults.__salt__, {'pillar.get': mock}):
self.assertEqual(defaults.get('core:users:root'), '')
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it executes a defaults client run and returns a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
|
'''
:codeauthor: :email:`Jayesh Kariya <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it execute a defaults client run and return a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
|
Remove useless mocked unit test
|
Remove useless mocked unit test
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
:codeauthor: :email:`Jayesh Kariya <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
- # 'get' function tests: 1
-
- def test_get(self):
- '''
- Test if it execute a defaults client run and return a dict
- '''
- mock = MagicMock(return_value='')
- with patch.dict(defaults.__salt__, {'pillar.get': mock}):
- self.assertEqual(defaults.get('core:users:root'), '')
-
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it execute a defaults client run and return a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
|
Remove useless mocked unit test
|
## Code Before:
'''
:codeauthor: :email:`Jayesh Kariya <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
# 'get' function tests: 1
def test_get(self):
'''
Test if it execute a defaults client run and return a dict
'''
mock = MagicMock(return_value='')
with patch.dict(defaults.__salt__, {'pillar.get': mock}):
self.assertEqual(defaults.get('core:users:root'), '')
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it execute a defaults client run and return a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
## Instruction:
Remove useless mocked unit test
## Code After:
'''
:codeauthor: :email:`Jayesh Kariya <[email protected]>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import inspect
# Import Salt Libs
from salt.modules import defaults
# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
'''
Test cases for salt.modules.defaults
'''
@patch('salt.modules.defaults.get',
MagicMock(return_value={'users': {'root': [0]}}))
def test_get_mock(self):
'''
Test if it execute a defaults client run and return a dict
'''
with patch.object(inspect, 'stack', MagicMock(return_value=[])):
self.assertEqual(defaults.get('core:users:root'),
{'users': {'root': [0]}})
if __name__ == '__main__':
from integration import run_tests
run_tests(DefaultsTestCase, needs_daemon=False)
|
...
'''
@patch('salt.modules.defaults.get',
...
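Aside: the deleted test exercised only the patch.dict plumbing. A self-contained sketch of that pattern, independent of Salt (the registry and function here are illustrative stand-ins):

from unittest.mock import MagicMock, patch

__salt__ = {}  # stand-in for Salt's module-level function registry

def get(key):
    # Delegates to whatever 'pillar.get' is wired up at call time.
    return __salt__['pillar.get'](key)

mock = MagicMock(return_value='')
with patch.dict(__salt__, {'pillar.get': mock}):
    assert get('core:users:root') == ''   # answered by the mock
mock.assert_called_once_with('core:users:root')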
|
1e218ba94c774372929d890780ab12efbfaae181
|
core/management/commands/heroku.py
|
core/management/commands/heroku.py
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.core.management import call_command
class Command(BaseCommand):
help = 'Creates a superuser for Heroku'
def handle(self, *args, **kwargs):
verbosity = kwargs['verbosity']
call_command('migrate', verbosity=0)
User.objects.create_superuser(
username='admin',
email='[email protected]',
password='changeme123'
)
if verbosity > 0:
self.stdout.write(
self.style.SUCCESS('Successfully run all Heroku commands.')
)
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.core.management import call_command
class Command(BaseCommand):
help = 'Runs migrations for Heroku'
def handle(self, *args, **kwargs):
verbosity = kwargs['verbosity']
call_command('migrate', verbosity=0)
if verbosity > 0:
self.stdout.write(
self.style.SUCCESS('Successfully ran all Heroku commands.')
)
|
Remove Heroku createsuperuser command. Migrate now creates a default user.
|
Remove Heroku createsuperuser command. Migrate now creates a default user.
|
Python
|
bsd-2-clause
|
cdubz/timestrap,muhleder/timestrap,muhleder/timestrap,muhleder/timestrap,overshard/timestrap,cdubz/timestrap,overshard/timestrap,overshard/timestrap,cdubz/timestrap
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.core.management import call_command
class Command(BaseCommand):
- help = 'Creates a superuser for Heroku'
+ help = 'Runs migrations for Heroku'
def handle(self, *args, **kwargs):
verbosity = kwargs['verbosity']
call_command('migrate', verbosity=0)
- User.objects.create_superuser(
- username='admin',
- email='[email protected]',
- password='changeme123'
- )
-
if verbosity > 0:
self.stdout.write(
- self.style.SUCCESS('Successfully run all Heroku commands.')
+ self.style.SUCCESS('Successfully ran all Heroku commands.')
)
|
Remove Heroku createsuperuser command. Migrate now creates a default user.
|
## Code Before:
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.core.management import call_command
class Command(BaseCommand):
help = 'Creates a superuser for Heroku'
def handle(self, *args, **kwargs):
verbosity = kwargs['verbosity']
call_command('migrate', verbosity=0)
User.objects.create_superuser(
username='admin',
email='[email protected]',
password='changeme123'
)
if verbosity > 0:
self.stdout.write(
self.style.SUCCESS('Successfully run all Heroku commands.')
)
## Instruction:
Remove Heroku createsuperuser command. Migrate now creates a default user.
## Code After:
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.core.management import call_command
class Command(BaseCommand):
help = 'Runs migrations for Heroku'
def handle(self, *args, **kwargs):
verbosity = kwargs['verbosity']
call_command('migrate', verbosity=0)
if verbosity > 0:
self.stdout.write(
self.style.SUCCESS('Successfully ran all Heroku commands.')
)
|
# ... existing code ...
class Command(BaseCommand):
help = 'Runs migrations for Heroku'
# ... modified code ...
if verbosity > 0:
...
self.stdout.write(
self.style.SUCCESS('Successfully ran all Heroku commands.')
)
# ... rest of the code ...
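Aside: a hedged sketch of driving the command above; it assumes a configured Django project with this file installed as core/management/commands/heroku.py:

from django.core.management import call_command

# Equivalent to `python manage.py heroku -v 0`: migrates quietly.
call_command('heroku', verbosity=0)

# With the default verbosity of 1, the success message is printed too.
call_command('heroku')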
|
72045f86b25b396160e1a4c9237e977ed575afb2
|
apps/catalogue/constants.py
|
apps/catalogue/constants.py
|
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
Support for 'deed.pl' license URL.
|
Support for 'deed.pl' license URL.
|
Python
|
agpl-3.0
|
fnp/wolnelektury,fnp/wolnelektury,fnp/wolnelektury,fnp/wolnelektury
|
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
+ LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
+ LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
Support for 'deed.pl' license URL.
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
## Instruction:
Support for 'deed.pl' license URL.
## Code After:
from django.utils.translation import ugettext_lazy as _
LICENSES = {
'http://creativecommons.org/licenses/by-sa/3.0/': {
'icon': 'cc-by-sa',
'description': _('Creative Commons Attribution-ShareAlike 3.0 Unported'),
},
}
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# Those will be generated only for books with own HTML.
EBOOK_FORMATS_WITHOUT_CHILDREN = ['txt', 'fb2']
# Those will be generated for all books.
EBOOK_FORMATS_WITH_CHILDREN = ['pdf', 'epub', 'mobi']
# Those will be generated when inherited cover changes.
EBOOK_FORMATS_WITH_COVERS = ['pdf', 'epub', 'mobi']
EBOOK_FORMATS = EBOOK_FORMATS_WITHOUT_CHILDREN + EBOOK_FORMATS_WITH_CHILDREN
|
# ... existing code ...
}
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/deed.pl'] = \
LICENSES['http://creativecommons.org/licenses/by-sa/3.0/']
# ... rest of the code ...
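Aside: if more URL spellings turn up later, the aliasing generalizes to a loop. A small sketch of the idea, not part of the commit (any extra variants would be added to the list):

CANONICAL = 'http://creativecommons.org/licenses/by-sa/3.0/'
ALIASES = [
    'http://creativecommons.org/licenses/by-sa/3.0/deed.pl',
    # further variants, if any, go here
]
for alias in ALIASES:
    LICENSES[alias] = LICENSES[CANONICAL]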
|
b535dcc490f56a54b92443172ad0b5828bc5a540
|
rpcd/playbooks/roles/horizon_extensions/templates/_50_rackspace.py
|
rpcd/playbooks/roles/horizon_extensions/templates/_50_rackspace.py
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
Fix enabled file installed from horizon-extensions
|
Fix enabled file installed from horizon-extensions
Add the angularjs module containing the Rackspace Solutions
panel code to the Horizon application so it works.
Requires accompanying patch
https://github.com/rcbops/horizon-extensions/pull/7
for the panel to work with this change.
closes 891
|
Python
|
apache-2.0
|
cfarquhar/rpc-openstack,galstrom21/rpc-openstack,mancdaz/rpc-openstack,sigmavirus24/rpc-openstack,cloudnull/rpc-openstack,major/rpc-openstack,darrenchan/rpc-openstack,cfarquhar/rpc-openstack,mancdaz/rpc-openstack,git-harry/rpc-openstack,darrenchan/rpc-openstack,xeregin/rpc-openstack,prometheanfire/rpc-openstack,robb-romans/rpc-openstack,shannonmitchell/rpc-openstack,rcbops/rpc-openstack,darrenchan/rpc-openstack,sigmavirus24/rpc-openstack,rcbops/rpc-openstack,git-harry/rpc-openstack,jacobwagner/rpc-openstack,byronmccollum/rpc-openstack,sigmavirus24/rpc-openstack,byronmccollum/rpc-openstack,xeregin/rpc-openstack,byronmccollum/rpc-openstack,sigmavirus24/rpc-openstack,xeregin/rpc-openstack,hughsaunders/rpc-openstack,galstrom21/rpc-openstack,cloudnull/rpc-openstack,BjoernT/rpc-openstack,major/rpc-openstack,shannonmitchell/rpc-openstack,xeregin/rpc-openstack,darrenchan/rpc-openstack,robb-romans/rpc-openstack,prometheanfire/rpc-openstack,BjoernT/rpc-openstack,jacobwagner/rpc-openstack,hughsaunders/rpc-openstack
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
+ ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
+
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
Fix enabled file installed from horizon-extensions
|
## Code Before:
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
## Instruction:
Fix enabled file installed from horizon-extensions
## Code After:
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
# ... existing code ...
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
# ... rest of the code ...
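Aside: Horizon 'enabled' files accept other ADD_* hooks alongside ADD_ANGULAR_MODULES; a hedged sketch (the JS path below is illustrative, not taken from the actual plugin):

DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = ['rackspace']
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If the module's JS ships with the plugin, it can be registered too:
ADD_JS_FILES = ['dashboard/rackspace/rackspace.module.js']
DISABLED = False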
|
649c70527ae602512cfa6ea62b60ebc43fc69797
|
lab/run_trace.py
|
lab/run_trace.py
|
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
|
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
Make this useful for py3 also
|
Make this useful for py3 also
|
Python
|
apache-2.0
|
hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,nedbat/coveragepy,hugovk/coveragepy
|
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
+ code = open(the_program).read()
sys.settrace(trace)
- execfile(the_program)
+ exec(code)
|
Make this useful for py3 also
|
## Code Before:
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
sys.settrace(trace)
execfile(the_program)
## Instruction:
Make this useful for py3 also
## Code After:
"""Run a simple trace function on a file of Python code."""
import os, sys
nest = 0
def trace(frame, event, arg):
global nest
if nest is None:
# This can happen when Python is shutting down.
return None
print("%s%s %s %d @%d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
frame.f_lasti,
))
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
the_program = sys.argv[1]
code = open(the_program).read()
sys.settrace(trace)
exec(code)
|
# ... existing code ...
code = open(the_program).read()
sys.settrace(trace)
exec(code)
# ... rest of the code ...
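Aside: one refinement worth noting. exec of a bare string reports the filename as '<string>', so the trace function above would print basename('<string>'); compiling with the real filename keeps frame.f_code.co_filename and tracebacks accurate on both Python 2 and 3. A sketch of that variant:

import sys

the_program = sys.argv[1]
with open(the_program) as f:
    # compile() records the filename, so tracing and tracebacks
    # point at the real file instead of '<string>'.
    code = compile(f.read(), the_program, 'exec')
sys.settrace(trace)  # `trace` as defined in the script above
exec(code)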
|
562fa35a036a43526b55546d97490b3f36001a18
|
robotpy_ext/misc/periodic_filter.py
|
robotpy_ext/misc/periodic_filter.py
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
self.logger.warn("Uh oh, this shouldn't have happened...")
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
Create example usage. Rename bypass_level
|
Create example usage. Rename bypass_level
|
Python
|
bsd-3-clause
|
robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
|
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
- The logger will always print logging levels of WARNING or higher
+ The logger will always print logging levels of WARNING or higher,
+ unless given a different bypass level
+
+ Example
+
+ class Component1:
+
+ def setup(self):
+ # Set period to 3 seconds, set bypass_level to WARN
+ self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
+
+ def execute(self):
+ # This message will be printed once every three seconds
+ self.logger.info('Component1 Executing')
+
+ # This message will be printed out every loop
+ self.logger.warn("Uh oh, this shouldn't have happened...")
+
"""
- def __init__(self, period, bypassLevel=logging.WARN):
+ def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
- :param bypassLevel: Lowest logging level that the filter should ignore
+ :param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
- self._bypassLevel = bypassLevel
+ self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
- return self._loggingLoop or record.levelno >= self._bypassLevel
+ return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
Create example usage. Rename bypass_level
|
## Code Before:
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypassLevel: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
## Instruction:
Create example usage. Rename bypass_level
## Code After:
import logging
import time
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
self.logger.warn("Uh oh, this shouldn't have happened...")
"""
def __init__(self, period, bypass_level=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
self._period = period
self._loggingLoop = True
self._last_log = -period
self._bypass_level = bypass_level
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = time.monotonic()
self._loggingLoop = False
if now - self._last_log > self._period:
self._loggingLoop = True
self._last_log = now
|
# ... existing code ...
The logger will always print logging levels of WARNING or higher,
unless given a different bypass level
Example
class Component1:
def setup(self):
# Set period to 3 seconds, set bypass_level to WARN
self.logger.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))
def execute(self):
# This message will be printed once every three seconds
self.logger.info('Component1 Executing')
# This message will be printed out every loop
self.logger.warn("Uh oh, this shouldn't have happened...")
"""
# ... modified code ...
def __init__(self, period, bypass_level=logging.WARN):
'''
...
:param period: Wait period (in seconds) between logs
:param bypass_level: Lowest logging level that the filter should ignore
'''
...
self._last_log = -period
self._bypass_level = bypass_level
...
self._refresh_logger()
return self._loggingLoop or record.levelno >= self._bypass_level
# ... rest of the code ...
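Aside: a runnable sketch of the filter outside a robot program, assuming the class is importable from the path shown in this record:

import logging
import time

from robotpy_ext.misc.periodic_filter import PeriodicFilter

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('component1')
log.addFilter(PeriodicFilter(3, bypass_level=logging.WARN))

for i in range(10):
    log.info('loop %d', i)  # printed at most once every 3 seconds
    if i == 5:
        log.warning('bypasses the period check immediately')
    time.sleep(1)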
|
986b15b5f33ebf25b26f40645378174bb66f1898
|
gerberlicious.py
|
gerberlicious.py
|
from gerberlicious.point import Point
from gerberlicious.layer import Layer
from gerberlicious.aperture import CircleAperture
from gerberlicious.drawable import PointList, ApertureFlash
from gerberlicious.render import GerberRenderer, SVGRenderer
if __name__ == "__main__":
layer = Layer()
aperture1 = CircleAperture("10", 0.1)
layer.add_aperture(aperture1)
aperture2 = CircleAperture("11", 0.2, 0.1)
layer.add_aperture(aperture2)
square = PointList(aperture1)
square.add_point(Point(2.5, 0))
square.add_point(Point(5, 0))
square.add_point(Point(5, 5))
square.add_point(Point(0, 5))
square.add_point(Point(0, 2.5))
square.add_point(Point(2.5, 0))
layer.add_shape(square)
donut = ApertureFlash(aperture2, Point(0, 5))
layer.add_shape(donut)
gr = GerberRenderer(layer)
gr.write_file("out.grb")
sr = SVGRenderer(layer)
sr.write_file("out.svg")
|
from gerberlicious.point import Point
from gerberlicious.layer import Layer
from gerberlicious.aperture import CircleAperture
from gerberlicious.drawable import PointList, ApertureFlash
from gerberlicious.render import GerberRenderer, SVGRenderer
if __name__ == "__main__":
layer = Layer()
aperture1 = CircleAperture("10", 0.1)
layer.add_aperture(aperture1)
aperture2 = CircleAperture("11", 0.2, 0.1)
layer.add_aperture(aperture2)
path = PointList(aperture1)
path.add_point(Point(2.5, 0))
path.add_point(Point(5, 0))
path.add_point(Point(5, 5))
path.add_point(Point(0, 5))
path.add_point(Point(0, 2.5))
path.add_point(Point(2.5, 0))
layer.add_shape(path)
donut = ApertureFlash(aperture2, Point(0, 5))
layer.add_shape(donut)
gr = GerberRenderer(layer)
gr.write_file("out.grb")
sr = SVGRenderer(layer)
sr.write_file("out.svg")
|
Rename 'square' to 'path' in example script
|
Rename 'square' to 'path' in example script
|
Python
|
mit
|
deveah/gerberlicious
|
from gerberlicious.point import Point
from gerberlicious.layer import Layer
from gerberlicious.aperture import CircleAperture
from gerberlicious.drawable import PointList, ApertureFlash
from gerberlicious.render import GerberRenderer, SVGRenderer
if __name__ == "__main__":
layer = Layer()
aperture1 = CircleAperture("10", 0.1)
layer.add_aperture(aperture1)
aperture2 = CircleAperture("11", 0.2, 0.1)
layer.add_aperture(aperture2)
- square = PointList(aperture1)
+ path = PointList(aperture1)
- square.add_point(Point(2.5, 0))
+ path.add_point(Point(2.5, 0))
- square.add_point(Point(5, 0))
+ path.add_point(Point(5, 0))
- square.add_point(Point(5, 5))
+ path.add_point(Point(5, 5))
- square.add_point(Point(0, 5))
+ path.add_point(Point(0, 5))
- square.add_point(Point(0, 2.5))
+ path.add_point(Point(0, 2.5))
- square.add_point(Point(2.5, 0))
+ path.add_point(Point(2.5, 0))
- layer.add_shape(square)
+ layer.add_shape(path)
donut = ApertureFlash(aperture2, Point(0, 5))
layer.add_shape(donut)
gr = GerberRenderer(layer)
gr.write_file("out.grb")
sr = SVGRenderer(layer)
sr.write_file("out.svg")
|
Rename 'square' to 'path' in example script
|
## Code Before:
from gerberlicious.point import Point
from gerberlicious.layer import Layer
from gerberlicious.aperture import CircleAperture
from gerberlicious.drawable import PointList, ApertureFlash
from gerberlicious.render import GerberRenderer, SVGRenderer
if __name__ == "__main__":
layer = Layer()
aperture1 = CircleAperture("10", 0.1)
layer.add_aperture(aperture1)
aperture2 = CircleAperture("11", 0.2, 0.1)
layer.add_aperture(aperture2)
square = PointList(aperture1)
square.add_point(Point(2.5, 0))
square.add_point(Point(5, 0))
square.add_point(Point(5, 5))
square.add_point(Point(0, 5))
square.add_point(Point(0, 2.5))
square.add_point(Point(2.5, 0))
layer.add_shape(square)
donut = ApertureFlash(aperture2, Point(0, 5))
layer.add_shape(donut)
gr = GerberRenderer(layer)
gr.write_file("out.grb")
sr = SVGRenderer(layer)
sr.write_file("out.svg")
## Instruction:
Rename 'square' to 'path' in example script
## Code After:
from gerberlicious.point import Point
from gerberlicious.layer import Layer
from gerberlicious.aperture import CircleAperture
from gerberlicious.drawable import PointList, ApertureFlash
from gerberlicious.render import GerberRenderer, SVGRenderer
if __name__ == "__main__":
layer = Layer()
aperture1 = CircleAperture("10", 0.1)
layer.add_aperture(aperture1)
aperture2 = CircleAperture("11", 0.2, 0.1)
layer.add_aperture(aperture2)
path = PointList(aperture1)
path.add_point(Point(2.5, 0))
path.add_point(Point(5, 0))
path.add_point(Point(5, 5))
path.add_point(Point(0, 5))
path.add_point(Point(0, 2.5))
path.add_point(Point(2.5, 0))
layer.add_shape(path)
donut = ApertureFlash(aperture2, Point(0, 5))
layer.add_shape(donut)
gr = GerberRenderer(layer)
gr.write_file("out.grb")
sr = SVGRenderer(layer)
sr.write_file("out.svg")
|
# ... existing code ...
path = PointList(aperture1)
path.add_point(Point(2.5, 0))
path.add_point(Point(5, 0))
path.add_point(Point(5, 5))
path.add_point(Point(0, 5))
path.add_point(Point(0, 2.5))
path.add_point(Point(2.5, 0))
layer.add_shape(path)
# ... rest of the code ...
|
4b30b6dd4eb24c36cd32d37bf6555be79cdc80a8
|
scripts/maf_split_by_src.py
|
scripts/maf_split_by_src.py
|
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
Allow splitting by a particular component (by index)
|
Allow splitting by a particular component (by index)
|
Python
|
mit
|
uhjish/bx-python,uhjish/bx-python,uhjish/bx-python
|
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
+ parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
+ comp = options.component
+ if comp is not None:
+ comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
-
+
+ if comp is None:
- writer_key = string.join( [ c.src for c in m.components ], '_' )
+ writer_key = string.join( [ c.src for c in m.components ], '_' )
+ else:
+ writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
Allow splitting by a particular component (by index)
|
## Code Before:
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
## Instruction:
Allow splitting by a particular component (by index)
## Code After:
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
...
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
...
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
...
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
...
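Aside: the per-key writer cache is a reusable pattern; a self-contained sketch in modern Python (the record's string.join, has_key and file() are Python 2 idioms):

def split_by_key(records, key_of, out_prefix=''):
    # Route each record into one output file per key, opening lazily.
    writers = {}
    try:
        for rec in records:
            key = key_of(rec)
            if key not in writers:
                writers[key] = open('%s%s.maf' % (out_prefix, key), 'w')
            writers[key].write(str(rec))
    finally:
        for w in writers.values():
            w.close()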
|
8e0afc06d221d86677a172fdb7d1388225504ba6
|
resp/__main__.py
|
resp/__main__.py
|
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
|
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
Add an explicit required property to all arguments
|
Add an explicit required property to all arguments
|
Python
|
mit
|
nok/resp,nok/resp
|
- import sys
import argparse
from Parser import Parser
- def main(argv):
+ def main():
# Arguments:
parser = argparse.ArgumentParser()
- parser.add_argument('-r', '--redis_cmd', type=str, default='')
+ parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
- parser.add_argument('-i', '--input', type=str, default='')
+ parser.add_argument('-i', '--input', type=str, default='', required=False)
- parser.add_argument('-d', '--delimiter', type=str, default=',')
+ parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
- parser.add_argument('-p', '--pipe', action='store_true')
+ parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
- main(sys.argv[1:])
+ main()
|
Add an explicit required property to all arguments
|
## Code Before:
import sys
import argparse
from Parser import Parser
def main(argv):
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='')
parser.add_argument('-i', '--input', type=str, default='')
parser.add_argument('-d', '--delimiter', type=str, default=',')
parser.add_argument('-p', '--pipe', action='store_true')
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main(sys.argv[1:])
## Instruction:
Add an explicit required property to all arguments
## Code After:
import argparse
from Parser import Parser
def main():
# Arguments:
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
# Parser:
Parser(args.input, args.redis_cmd, args.delimiter, args.pipe)
if __name__ == "__main__":
main()
|
...
import argparse
...
def main():
...
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', type=str, default='', required=True)
parser.add_argument('-i', '--input', type=str, default='', required=False)
parser.add_argument('-d', '--delimiter', type=str, default=',', required=False)
parser.add_argument('-p', '--pipe', action='store_true', required=False)
args = parser.parse_args()
...
if __name__ == "__main__":
main()
...
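Aside: with required=True the default='' on --redis_cmd can never apply, since parsing fails when the option is absent. A tiny sketch of the behavior:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-r', '--redis_cmd', required=True)
parser.add_argument('-p', '--pipe', action='store_true')

args = parser.parse_args(['-r', 'SET'])
print(args)              # Namespace(pipe=False, redis_cmd='SET')
# parser.parse_args([])  # exits: the following arguments are required: -r/--redis_cmd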
|
4c85300c5458053ac08a393b00513c80baf28031
|
reqon/deprecated/__init__.py
|
reqon/deprecated/__init__.py
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(reql, query['$query'])
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
Fix argument order of reqon.deprecated.build_terms().
|
Fix argument order of reqon.deprecated.build_terms().
|
Python
|
mit
|
dmpayton/reqon
|
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
- return build_terms(query['$query'], reql)
+ return build_terms(reql, query['$query'])
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
Fix argument order of reqon.deprecated.build_terms().
|
## Code Before:
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
## Instruction:
Fix argument order of reqon.deprecated.build_terms().
## Code After:
import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(reql, query['$query'])
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
|
// ... existing code ...
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(reql, query['$query'])
// ... rest of the code ...
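Aside: swapped-positional bugs like this one are easy to rule out by calling with keywords; a minimal sketch:

def build_terms(reql, query):
    return reql, query

reql, spec = 'reql-object', [['$filter', {}]]
# Keyword arguments make the call site order-insensitive:
assert build_terms(reql=reql, query=spec) == ('reql-object', spec)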
|
34452461de40c35d28bfc8b2a18b6fa14ebb875d
|
onadata/apps/fsforms/management/commands/update_mongo_value_type.py
|
onadata/apps/fsforms/management/commands/update_mongo_value_type.py
|
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Update value type of fs_site and fs_uuid in mongo instances to make string type to int type"
def handle(self, *args, **kwargs):
xform_instances = settings.MONGO_DB.instances
#type 2 is for string type
query = {'$or':[{'fs_site':{'$type':2}}, {'fs_uuid':{'$type':2}}]}
cursor = xform_instances.find(query)
for record in cursor:
fs_site = record['fs_site']
if fs_site == '':
new_fs_site = None
else:
new_fs_site = int(fs_site)
fs_uuid = record['fs_uuid']
if fs_uuid == '':
new_fs_uuid = None
elif fs_uuid == 'None':
new_fs_uuid = None
else:
new_fs_uuid = int(fs_uuid)
xform_instances.update({'_id':record['_id']},{'$set':{'fs_site':new_fs_site, 'fs_uuid':new_fs_uuid}})
print('Updating mongo instance for ' + str(record['_id']))
print('Mongo instances updated.......')
|
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Update value type of fs_site and fs_uuid in mongo instances to make string type to int type"
def handle(self, *args, **kwargs):
xform_instances = settings.MONGO_DB.instances
#type 2 is for string type
query = {'$or':[{'fs_site':{'$type':2}}, {'fs_uuid':{'$type':2}}]}
cursor = xform_instances.find(query)
for record in cursor:
fs_site = record['fs_site']
if fs_site == '':
new_fs_site = None
else:
new_fs_site = int(fs_site)
fs_uuid = record['fs_uuid']
if fs_uuid == '':
new_fs_uuid = None
elif fs_uuid == 'None':
new_fs_uuid = None
elif fs_uuid == 'NoneType':
new_fs_uuid = None
else:
new_fs_uuid = int(fs_uuid)
xform_instances.update({'_id':record['_id']},{'$set':{'fs_site':new_fs_site, 'fs_uuid':new_fs_uuid}})
print('Updating mongo instance for ' + str(record['_id']))
print('Mongo instances updated.......')
|
Handle more cases in the mongo value type update
|
Handle more cases in the mongo value type update
|
Python
|
bsd-2-clause
|
awemulya/fieldsight-kobocat,awemulya/fieldsight-kobocat,awemulya/fieldsight-kobocat,awemulya/fieldsight-kobocat
|
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Update value type of fs_site and fs_uuid in mongo instances to make string type to int type"
def handle(self, *args, **kwargs):
xform_instances = settings.MONGO_DB.instances
#type 2 is for string type
query = {'$or':[{'fs_site':{'$type':2}}, {'fs_uuid':{'$type':2}}]}
cursor = xform_instances.find(query)
for record in cursor:
fs_site = record['fs_site']
if fs_site == '':
new_fs_site = None
else:
new_fs_site = int(fs_site)
fs_uuid = record['fs_uuid']
if fs_uuid == '':
new_fs_uuid = None
elif fs_uuid == 'None':
new_fs_uuid = None
+ elif fs_uuid == 'NoneType':
+ new_fs_uuid = None
else:
new_fs_uuid = int(fs_uuid)
xform_instances.update({'_id':record['_id']},{'$set':{'fs_site':new_fs_site, 'fs_uuid':new_fs_uuid}})
print('Updating mongo instance for ' + str(record['_id']))
print('Mongo instances updated.......')
|
Handle cases update mongo value type
|
## Code Before:
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Update value type of fs_site and fs_uuid in mongo instances to make string type to int type"
def handle(self, *args, **kwargs):
xform_instances = settings.MONGO_DB.instances
#type 2 is for string type
query = {'$or':[{'fs_site':{'$type':2}}, {'fs_uuid':{'$type':2}}]}
cursor = xform_instances.find(query)
for record in cursor:
fs_site = record['fs_site']
if fs_site == '':
new_fs_site = None
else:
new_fs_site = int(fs_site)
fs_uuid = record['fs_uuid']
if fs_uuid == '':
new_fs_uuid = None
elif fs_uuid == 'None':
new_fs_uuid = None
else:
new_fs_uuid = int(fs_uuid)
xform_instances.update({'_id':record['_id']},{'$set':{'fs_site':new_fs_site, 'fs_uuid':new_fs_uuid}})
print('Updating mongo instance for ' + str(record['_id']))
print('Mongo instances updated.......')
## Instruction:
Handle more cases in the mongo value type update
## Code After:
from django.conf import settings
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Update value type of fs_site and fs_uuid in mongo instances to make string type to int type"
def handle(self, *args, **kwargs):
xform_instances = settings.MONGO_DB.instances
#type 2 is for string type
query = {'$or':[{'fs_site':{'$type':2}}, {'fs_uuid':{'$type':2}}]}
cursor = xform_instances.find(query)
for record in cursor:
fs_site = record['fs_site']
if fs_site == '':
new_fs_site = None
else:
new_fs_site = int(fs_site)
fs_uuid = record['fs_uuid']
if fs_uuid == '':
new_fs_uuid = None
elif fs_uuid == 'None':
new_fs_uuid = None
elif fs_uuid == 'NoneType':
new_fs_uuid = None
else:
new_fs_uuid = int(fs_uuid)
xform_instances.update({'_id':record['_id']},{'$set':{'fs_site':new_fs_site, 'fs_uuid':new_fs_uuid}})
print('Updating mongo instance for ' + str(record['_id']))
print('Mongo instances updated.......')
|
// ... existing code ...
new_fs_uuid = None
elif fs_uuid == 'NoneType':
new_fs_uuid = None
else:
// ... rest of the code ...
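Aside: since MongoDB 3.2, $type also accepts string aliases, which read better than the BSON numeric code 2, and the branchy conversion collapses into a helper. A hedged pymongo sketch (the client and database name are placeholders, and update_one replaces the deprecated update):

from pymongo import MongoClient

instances = MongoClient()['formhub'].instances  # database name assumed

def to_int_or_none(value):
    # Merges the '', 'None' and 'NoneType' branches from the command above.
    return None if value in ('', 'None', 'NoneType') else int(value)

query = {'$or': [{'fs_site': {'$type': 'string'}},
                 {'fs_uuid': {'$type': 'string'}}]}
for record in instances.find(query):
    instances.update_one(
        {'_id': record['_id']},
        {'$set': {'fs_site': to_int_or_none(record['fs_site']),
                  'fs_uuid': to_int_or_none(record['fs_uuid'])}})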
|
822e6123cc598b4f6a0eafedfb2f0d0cbfba5f37
|
currencies/migrations/0003_auto_20151216_1906.py
|
currencies/migrations/0003_auto_20151216_1906.py
|
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
currency.save()
currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
|
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''):
pass
else:
currency.save()
currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
|
Fix currencies seeding so it won't create empty currencies
|
Fix currencies seeding so it won't create empty currencies
|
Python
|
mit
|
openspending/cosmopolitan,kiote/cosmopolitan
|
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
+ if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''):
+ pass
+ else:
- currency.save()
+ currency.save()
- currency.countries.add(extra_country.pk)
+ currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
|
Fix currencies seeding so it won't create empty currencies
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
currency.save()
currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
## Instruction:
Fix currencies seeding so it won't create empty currencies
## Code After:
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''):
pass
else:
currency.save()
currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
|
...
name=extra_country.country.currency_name)
if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''):
pass
else:
currency.save()
currency.countries.add(extra_country.pk)
...
|
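A minimal standalone sketch of the empty-value guard the currencies migration above adds, written without Django so it runs anywhere; the Currency class and seed helper here are hypothetical stand-ins, not the real model or migration code.

class Currency:
    def __init__(self, code, name):
        self.code = code
        self.name = name
        self.countries = []

    def save(self):
        print("saved %s (%s)" % (self.code, self.name))


def seed(currency, country_pk):
    # Equivalent, slightly tighter form of the check: persist and link
    # only when both the code and the name are non-empty strings.
    if str(currency.code) and str(currency.name):
        currency.save()
        currency.countries.append(country_pk)


seed(Currency("EUR", "Euro"), 42)  # saved and linked
seed(Currency("", ""), 43)         # skipped: empty code and name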
5dfd723b37e208c1b81e65cd2df1b7d9226493b3
|
numpy/_array_api/_sorting_functions.py
|
numpy/_array_api/_sorting_functions.py
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
|
Add missing returns to the array API sorting functions
|
Add missing returns to the array API sorting functions
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
+ return res
|
Add missing returns to the array API sorting functions
|
## Code Before:
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
## Instruction:
Add missing returns to the array API sorting functions
## Code After:
def argsort(x, /, *, axis=-1, descending=False, stable=True):
from .. import argsort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = argsort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
def sort(x, /, *, axis=-1, descending=False, stable=True):
from .. import sort
from .. import flip
# Note: this keyword argument is different, and the default is different.
kind = 'stable' if stable else 'quicksort'
res = sort(x, axis=axis, kind=kind)
if descending:
res = flip(res, axis=axis)
return res
|
# ... existing code ...
res = flip(res, axis=axis)
return res
# ... modified code ...
res = flip(res, axis=axis)
return res
# ... rest of the code ...
|
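The flip-based descending order these wrappers implement, shown with plain NumPy calls (assuming NumPy is installed). Note one subtlety: flipping a stable ascending argsort reverses the relative order of equal elements, so the descending result is not itself stable.

import numpy as np

x = np.array([3, 1, 2])
idx = np.argsort(x, axis=-1, kind='stable')  # ascending indices: [1 2 0]
idx = np.flip(idx, axis=-1)                  # descending indices: [0 2 1]
print(idx, x[idx])                           # [0 2 1] [3 2 1]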
f1fedff9247b78120df7335b64cdf46c8f60ef03
|
test/test_fixtures.py
|
test/test_fixtures.py
|
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
|
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
class TestClass:
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(self, io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
|
Add some test for method signature inspection
|
Add some test for method signature inspection
|
Python
|
apache-2.0
|
eugeniy/pytest-tornado
|
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
+ class TestClass:
+ @pytest.mark.gen_test
+ def test_uses_pytestmark_fixtures(self, io_loop):
+ assert (yield dummy(io_loop))
+ assert _used_fixture
+
|
Add some test for method signature inspection
|
## Code Before:
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
## Instruction:
Add some test for method signature inspection
## Code After:
import pytest
from tornado import gen
_used_fixture = False
@gen.coroutine
def dummy(io_loop):
yield gen.Task(io_loop.add_callback)
raise gen.Return(True)
@pytest.fixture(scope='module')
def preparations():
global _used_fixture
_used_fixture = True
pytestmark = pytest.mark.usefixtures('preparations')
@pytest.mark.xfail(pytest.__version__ < '2.7.0',
reason='py.test 2.7 adds hookwrapper, fixes collection')
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
class TestClass:
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(self, io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
|
# ... existing code ...
assert _used_fixture
class TestClass:
@pytest.mark.gen_test
def test_uses_pytestmark_fixtures(self, io_loop):
assert (yield dummy(io_loop))
assert _used_fixture
# ... rest of the code ...
|
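A rough sketch of the kind of signature inspection such a plugin needs in order to inject fixtures into bound test methods as well as plain functions; `self` must be skipped for methods. This illustrates the idea only, not pytest-tornado's actual implementation.

import inspect

class TestClass:
    def test_uses_pytestmark_fixtures(self, io_loop):
        return io_loop

def fixture_args(func, is_method=False):
    # Collect argument names; drop the implicit first argument for methods.
    args = inspect.getfullargspec(func).args
    return args[1:] if is_method else args

print(fixture_args(TestClass.test_uses_pytestmark_fixtures, is_method=True))
# -> ['io_loop']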
d3438e85ab4158d769b0662729a8aff7d143971a
|
csv_ical/tests/test_convert.py
|
csv_ical/tests/test_convert.py
|
import unittest
from csv_ical import convert
class TestConvert(unittest.TestCase):
def setUp(self):
self.convert = convert.Convert()
def test_generate_configs(self):
self.convert._generate_configs_from_default()
|
import datetime
import os
import tempfile
import unittest
from syspath import get_git_root
from csv_ical import convert
EXAMPLE_ICS = os.path.join(get_git_root(), 'examples', 'arrive.ics')
EXAMPLE_CSV = os.path.join(get_git_root(), 'examples', 'BostonCruiseTerminalSchedule.csv')
CSV_CONFIGS = {
'HEADER_COLUMNS_TO_SKIP': 2,
'CSV_NAME': 3,
'CSV_START_DATE': 7,
'CSV_END_DATE': 8,
'CSV_DESCRIPTION': 6,
'CSV_LOCATION': 9,
}
class TestConvert(unittest.TestCase):
def setUp(self):
self.convert = convert.Convert()
def test_generate_configs(self):
config = self.convert._generate_configs_from_default()
self.assertEqual(config['CSV_NAME'], 0)
def test_generate_configs_override(self):
overrides = {
'CSV_NAME': 5,
}
config = self.convert._generate_configs_from_default(overrides)
self.assertEqual(config['CSV_NAME'], 5)
def test_read_ical(self):
self.convert.read_ical(EXAMPLE_ICS)
self.assertNotEqual(self.convert.cal, None)
def test_read_csv(self):
self.convert.read_csv(EXAMPLE_CSV)
self.assertNotEqual(self.convert.csv_data, [])
def test_make_ical(self):
self.convert.read_csv(EXAMPLE_CSV)
self.convert.csv_data = [self.convert.csv_data[0]]
self.convert.csv_data[0][7] = datetime.datetime.now()
self.convert.csv_data[0][8] = datetime.datetime.now()
self.convert.make_ical(CSV_CONFIGS)
self.assertNotEqual(self.convert.cal, None)
def test_make_csv(self):
self.convert.read_ical(EXAMPLE_ICS)
self.convert.make_csv()
self.assertNotEqual(self.convert.csv_data, [])
def test_save_ical(self):
self.convert.read_ical(EXAMPLE_ICS)
with tempfile.NamedTemporaryFile() as temp:
self.convert.save_ical(temp.name)
def test_save_csv(self):
self.convert.read_csv(EXAMPLE_CSV)
with tempfile.NamedTemporaryFile() as temp:
self.convert.save_csv(temp.name)
|
Add tests for all methods
|
Add tests for all methods
|
Python
|
mit
|
albertyw/csv-to-ical
|
+ import datetime
+ import os
+ import tempfile
import unittest
+ from syspath import get_git_root
+
from csv_ical import convert
+
+ EXAMPLE_ICS = os.path.join(get_git_root(), 'examples', 'arrive.ics')
+ EXAMPLE_CSV = os.path.join(get_git_root(), 'examples', 'BostonCruiseTerminalSchedule.csv')
+ CSV_CONFIGS = {
+ 'HEADER_COLUMNS_TO_SKIP': 2,
+ 'CSV_NAME': 3,
+ 'CSV_START_DATE': 7,
+ 'CSV_END_DATE': 8,
+ 'CSV_DESCRIPTION': 6,
+ 'CSV_LOCATION': 9,
+ }
class TestConvert(unittest.TestCase):
def setUp(self):
self.convert = convert.Convert()
def test_generate_configs(self):
- self.convert._generate_configs_from_default()
+ config = self.convert._generate_configs_from_default()
+ self.assertEqual(config['CSV_NAME'], 0)
+ def test_generate_configs_override(self):
+ overrides = {
+ 'CSV_NAME': 5,
+ }
+ config = self.convert._generate_configs_from_default(overrides)
+ self.assertEqual(config['CSV_NAME'], 5)
+
+ def test_read_ical(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ self.assertNotEqual(self.convert.cal, None)
+
+ def test_read_csv(self):
+ self.convert.read_csv(EXAMPLE_CSV)
+ self.assertNotEqual(self.convert.csv_data, [])
+
+ def test_make_ical(self):
+ self.convert.read_csv(EXAMPLE_CSV)
+ self.convert.csv_data = [self.convert.csv_data[0]]
+ self.convert.csv_data[0][7] = datetime.datetime.now()
+ self.convert.csv_data[0][8] = datetime.datetime.now()
+ self.convert.make_ical(CSV_CONFIGS)
+ self.assertNotEqual(self.convert.cal, None)
+
+ def test_make_csv(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ self.convert.make_csv()
+ self.assertNotEqual(self.convert.csv_data, [])
+
+ def test_save_ical(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ with tempfile.NamedTemporaryFile() as temp:
+ self.convert.save_ical(temp.name)
+
+ def test_save_csv(self):
+ self.convert.read_csv(EXAMPLE_CSV)
+ with tempfile.NamedTemporaryFile() as temp:
+ self.convert.save_csv(temp.name)
+
|
Add tests for all methods
|
## Code Before:
import unittest
from csv_ical import convert
class TestConvert(unittest.TestCase):
def setUp(self):
self.convert = convert.Convert()
def test_generate_configs(self):
self.convert._generate_configs_from_default()
## Instruction:
Add tests for all methods
## Code After:
import datetime
import os
import tempfile
import unittest
from syspath import get_git_root
from csv_ical import convert
EXAMPLE_ICS = os.path.join(get_git_root(), 'examples', 'arrive.ics')
EXAMPLE_CSV = os.path.join(get_git_root(), 'examples', 'BostonCruiseTerminalSchedule.csv')
CSV_CONFIGS = {
'HEADER_COLUMNS_TO_SKIP': 2,
'CSV_NAME': 3,
'CSV_START_DATE': 7,
'CSV_END_DATE': 8,
'CSV_DESCRIPTION': 6,
'CSV_LOCATION': 9,
}
class TestConvert(unittest.TestCase):
def setUp(self):
self.convert = convert.Convert()
def test_generate_configs(self):
config = self.convert._generate_configs_from_default()
self.assertEqual(config['CSV_NAME'], 0)
def test_generate_configs_override(self):
overrides = {
'CSV_NAME': 5,
}
config = self.convert._generate_configs_from_default(overrides)
self.assertEqual(config['CSV_NAME'], 5)
def test_read_ical(self):
self.convert.read_ical(EXAMPLE_ICS)
self.assertNotEqual(self.convert.cal, None)
def test_read_csv(self):
self.convert.read_csv(EXAMPLE_CSV)
self.assertNotEqual(self.convert.csv_data, [])
def test_make_ical(self):
self.convert.read_csv(EXAMPLE_CSV)
self.convert.csv_data = [self.convert.csv_data[0]]
self.convert.csv_data[0][7] = datetime.datetime.now()
self.convert.csv_data[0][8] = datetime.datetime.now()
self.convert.make_ical(CSV_CONFIGS)
self.assertNotEqual(self.convert.cal, None)
def test_make_csv(self):
self.convert.read_ical(EXAMPLE_ICS)
self.convert.make_csv()
self.assertNotEqual(self.convert.csv_data, [])
def test_save_ical(self):
self.convert.read_ical(EXAMPLE_ICS)
with tempfile.NamedTemporaryFile() as temp:
self.convert.save_ical(temp.name)
def test_save_csv(self):
self.convert.read_csv(EXAMPLE_CSV)
with tempfile.NamedTemporaryFile() as temp:
self.convert.save_csv(temp.name)
|
# ... existing code ...
import datetime
import os
import tempfile
import unittest
# ... modified code ...
from syspath import get_git_root
from csv_ical import convert
EXAMPLE_ICS = os.path.join(get_git_root(), 'examples', 'arrive.ics')
EXAMPLE_CSV = os.path.join(get_git_root(), 'examples', 'BostonCruiseTerminalSchedule.csv')
CSV_CONFIGS = {
'HEADER_COLUMNS_TO_SKIP': 2,
'CSV_NAME': 3,
'CSV_START_DATE': 7,
'CSV_END_DATE': 8,
'CSV_DESCRIPTION': 6,
'CSV_LOCATION': 9,
}
...
def test_generate_configs(self):
config = self.convert._generate_configs_from_default()
self.assertEqual(config['CSV_NAME'], 0)
def test_generate_configs_override(self):
overrides = {
'CSV_NAME': 5,
}
config = self.convert._generate_configs_from_default(overrides)
self.assertEqual(config['CSV_NAME'], 5)
def test_read_ical(self):
self.convert.read_ical(EXAMPLE_ICS)
self.assertNotEqual(self.convert.cal, None)
def test_read_csv(self):
self.convert.read_csv(EXAMPLE_CSV)
self.assertNotEqual(self.convert.csv_data, [])
def test_make_ical(self):
self.convert.read_csv(EXAMPLE_CSV)
self.convert.csv_data = [self.convert.csv_data[0]]
self.convert.csv_data[0][7] = datetime.datetime.now()
self.convert.csv_data[0][8] = datetime.datetime.now()
self.convert.make_ical(CSV_CONFIGS)
self.assertNotEqual(self.convert.cal, None)
def test_make_csv(self):
self.convert.read_ical(EXAMPLE_ICS)
self.convert.make_csv()
self.assertNotEqual(self.convert.csv_data, [])
def test_save_ical(self):
self.convert.read_ical(EXAMPLE_ICS)
with tempfile.NamedTemporaryFile() as temp:
self.convert.save_ical(temp.name)
def test_save_csv(self):
self.convert.read_csv(EXAMPLE_CSV)
with tempfile.NamedTemporaryFile() as temp:
self.convert.save_csv(temp.name)
# ... rest of the code ...
|
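The save-to-tempfile pattern the new save tests use, shown standalone; the writer below is a hypothetical stand-in for Convert.save_csv, and reopening a NamedTemporaryFile by name assumes a POSIX platform.

import tempfile

def save_csv(path, rows):
    # Stand-in writer: one comma-joined line per row.
    with open(path, 'w') as f:
        for row in rows:
            f.write(','.join(row) + '\n')

with tempfile.NamedTemporaryFile() as temp:
    save_csv(temp.name, [('code', 'name'), ('EUR', 'Euro')])
    with open(temp.name) as f:
        print(f.read())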
d0380db930dbf145108a7ef0330dd19475f7fdee
|
test_arrange_schedule.py
|
test_arrange_schedule.py
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
Add test case for read_arrange_mode()
|
Add test case for read_arrange_mode()
|
Python
|
apache-2.0
|
Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,stvreumi/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
+
+ def test_read_arrange_mode():
+ keys = ['arrange_sn','arrange_mode','condition']
+ receive_msg = read_arrange_mode()
+ for key in keys:
+ assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
+ test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
Add test case for read_arrange_mode()
|
## Code Before:
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_crawler_cwb_img(system_setting)
print("All test passed")
## Instruction:
Add test case for read_arrange_mode()
## Code After:
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
...
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
...
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
...
|
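The key-presence assertion both tests repeat, factored into a generic helper; the sample dict below is invented data, not real output from read_arrange_mode().

def assert_has_keys(msg, keys):
    # Fail with the name of the first missing key.
    for key in keys:
        assert key in msg, "missing key: %s" % key

assert_has_keys(
    {'arrange_sn': 1, 'arrange_mode': 'auto', 'condition': None},
    ['arrange_sn', 'arrange_mode', 'condition'])
print("keys present")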
1b9d453f6fe0d2128849f98922f082d6ccfbee69
|
channelfilter.py
|
channelfilter.py
|
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
|
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param projects: Get all channels to spam for the given projects
:type projects: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
|
Fix channel filtering to work properly
|
Fix channel filtering to work properly
|
Python
|
mit
|
wikimedia/labs-tools-wikibugs2,wikimedia/labs-tools-wikibugs2
|
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
- def channels_for(self, project):
+ def channels_for(self, projects):
"""
- :param project: Get all channels to spam for the given project
+ :param projects: Get all channels to spam for the given projects
- :type project: basestring
+ :type projects: list
"""
channels = set()
for channel in self.config['channels']:
+ for project in projects:
- if project in self.config['channels'][channel]:
+ if project in self.config['channels'][channel]:
- channels.add(channel)
+ channels.add(channel)
- continue
+ break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
- print(channels)
return channels
|
Fix channel filtering to work properly
|
## Code Before:
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
## Instruction:
Fix channel filtering to work properly
## Code After:
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param projects: Get all channels to spam for the given projects
:type projects: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
|
# ... existing code ...
def channels_for(self, projects):
"""
:param projects: Get all channels to spam for the given projects
:type projects: list
"""
# ... modified code ...
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
...
channels.add(self.firehose_channel)
return channels
# ... rest of the code ...
|
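The corrected matching logic shown standalone: a channel matches when any of the given projects appears in its list, and `break` stops scanning projects once the channel has matched, whereas the old `continue` only skipped a single project. The config dict here is invented sample data.

config = {'#ops': ['puppet', 'dns'], '#web': ['mediawiki']}

def channels_for(projects, default='#default', firehose='#firehose'):
    channels = set()
    for channel, channel_projects in config.items():
        for project in projects:
            if project in channel_projects:
                channels.add(channel)
                break  # this channel already matched; move to the next one
    if not channels:
        channels.add(default)
    channels.add(firehose)
    return channels

print(channels_for(['dns']))      # {'#ops', '#firehose'} (set order may vary)
print(channels_for(['nothing']))  # {'#default', '#firehose'}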
4947ebf9460c2cf2ba8338de92601804dec2148a
|
src/svg_icons/templatetags/svg_icons.py
|
src/svg_icons/templatetags/svg_icons.py
|
import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render a SVG icon defined in a json file to our template.
..:json example (stripped)::
{
"icons": [
{
"icon": {
"paths": [
"M365.339 474.828c-19.319-12.616-42.222-18.062....."
]
},
"properties": {
"name": "tools"
}
}
]
}
"""
cache_key = ':'.join([
getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
icon_paths = cache.get(cache_key)
if not icon_paths:
source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
if not source_file:
raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
data = json.load(open(source_file, 'r'))
for icon_data in data['icons']:
if name != icon_data['properties']['name']:
continue
icon_paths = icon_data['icon']['paths']
if not icon_paths:
raise TemplateSyntaxError("Requested icon does not exist")
cache.set(cache_key, icon_paths)
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icon_paths,
}
|
import json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
|
Use the new reader classes in the template tag
|
Use the new reader classes in the template tag
|
Python
|
apache-2.0
|
mikedingjan/django-svg-icons,mikedingjan/django-svg-icons
|
import json
+ from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
+ reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
+
+ try:
+ module, cls = reader_class.rsplit('.', 1)
+ module = import_module(module)
+ Reader = getattr(module, cls)
+ except ImportError:
+ raise ValueError("No valid icon reader class found.")
+
register = Library()
+ icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
+ """Render the SVG icon paths returned by the
+ icon reader in the template.
- """Render a SVG icon defined in a json file to our template.
-
- ..:json example (stripped)::
- {
- "icons": [
- {
- "icon": {
- "paths": [
- "M365.339 474.828c-19.319-12.616-42.222-18.062....."
- ]
- },
- "properties": {
- "name": "tools"
- }
- }
- ]
- }
"""
- cache_key = ':'.join([
- getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
- icon_paths = cache.get(cache_key)
- if not icon_paths:
- source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
- if not source_file:
- raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
-
- data = json.load(open(source_file, 'r'))
- for icon_data in data['icons']:
- if name != icon_data['properties']['name']:
- continue
- icon_paths = icon_data['icon']['paths']
-
- if not icon_paths:
- raise TemplateSyntaxError("Requested icon does not exist")
-
- cache.set(cache_key, icon_paths)
-
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
- 'paths': icon_paths,
+ 'paths': icons.get_svg_paths(name),
}
|
Use the new reader classes in the template tag
|
## Code Before:
import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render a SVG icon defined in a json file to our template.
..:json example (stripped)::
{
"icons": [
{
"icon": {
"paths": [
"M365.339 474.828c-19.319-12.616-42.222-18.062....."
]
},
"properties": {
"name": "tools"
}
}
]
}
"""
cache_key = ':'.join([
getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
icon_paths = cache.get(cache_key)
if not icon_paths:
source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
if not source_file:
raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
data = json.load(open(source_file, 'r'))
for icon_data in data['icons']:
if name != icon_data['properties']['name']:
continue
icon_paths = icon_data['icon']['paths']
if not icon_paths:
raise TemplateSyntaxError("Requested icon does not exist")
cache.set(cache_key, icon_paths)
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icon_paths,
}
## Instruction:
Use the new reader classes in the template tag
## Code After:
import json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
|
// ... existing code ...
import json
from importlib import import_module
// ... modified code ...
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
...
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
...
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
...
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
// ... rest of the code ...
|
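The dotted-path class loading used for SVG_ICONS_READER_CLASS, reduced to its core; the target here is a stdlib class so the snippet runs without Django or the icon readers.

from importlib import import_module

def load_class(dotted_path):
    # Split "package.module.Class" into module path and class name.
    module_path, cls_name = dotted_path.rsplit('.', 1)
    return getattr(import_module(module_path), cls_name)

OrderedDict = load_class('collections.OrderedDict')
print(OrderedDict([('a', 1)]))  # OrderedDict([('a', 1)])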
839f9edc811776b8898cdf1fa7116eec9aef50a7
|
tests/xmlsec/test_templates.py
|
tests/xmlsec/test_templates.py
|
import xmlsec
def test_create_signature_template():
node = xmlsec.create_signature_template()
assert node.tag.endswith('Signature')
assert node.xpath('*[local-name() = "SignatureValue"]')
assert node.xpath('*[local-name() = "SignedInfo"]')
return node
def test_add_reference():
node = test_create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
assert ref.tag.endswith('Reference')
assert node.xpath('.//*[local-name() = "Reference"]')
|
import xmlsec
def test_create_signature_template():
node = xmlsec.create_signature_template()
assert node.tag.endswith('Signature')
assert node.xpath('*[local-name() = "SignatureValue"]')
assert node.xpath('*[local-name() = "SignedInfo"]')
def test_add_reference():
node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
assert ref.tag.endswith('Reference')
assert node.xpath('.//*[local-name() = "Reference"]')
def test_add_transform():
node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
xmlsec.add_transform(ref, xmlsec.method.ENVELOPED)
assert ref.xpath('.//*[local-name() = "Transform"]')
def test_ensure_key_info():
node = xmlsec.create_signature_template()
xmlsec.ensure_key_info(node)
assert node.xpath('.//*[local-name() = "KeyInfo"]')
def test_add_x509_data():
node = xmlsec.create_signature_template()
info = xmlsec.ensure_key_info(node)
xmlsec.add_x509_data(info)
assert node.xpath('.//*[local-name() = "X509Data"]')
def test_add_key_name():
node = xmlsec.create_signature_template()
info = xmlsec.ensure_key_info(node)
xmlsec.add_key_name(info, b'bob.pem')
assert node.xpath('.//*[local-name() = "KeyName" and text() = "bob.pem"]')
|
Add additional tests for templates.
|
Add additional tests for templates.
|
Python
|
mit
|
devsisters/python-xmlsec,concordusapps/python-xmlsec,mehcode/python-xmlsec,devsisters/python-xmlsec,mehcode/python-xmlsec,concordusapps/python-xmlsec
|
import xmlsec
def test_create_signature_template():
node = xmlsec.create_signature_template()
assert node.tag.endswith('Signature')
assert node.xpath('*[local-name() = "SignatureValue"]')
assert node.xpath('*[local-name() = "SignedInfo"]')
- return node
-
def test_add_reference():
- node = test_create_signature_template()
+ node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
assert ref.tag.endswith('Reference')
assert node.xpath('.//*[local-name() = "Reference"]')
+
+ def test_add_transform():
+ node = xmlsec.create_signature_template()
+ ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
+ xmlsec.add_transform(ref, xmlsec.method.ENVELOPED)
+
+ assert ref.xpath('.//*[local-name() = "Transform"]')
+
+
+ def test_ensure_key_info():
+ node = xmlsec.create_signature_template()
+ xmlsec.ensure_key_info(node)
+
+ assert node.xpath('.//*[local-name() = "KeyInfo"]')
+
+
+ def test_add_x509_data():
+ node = xmlsec.create_signature_template()
+ info = xmlsec.ensure_key_info(node)
+ xmlsec.add_x509_data(info)
+
+ assert node.xpath('.//*[local-name() = "X509Data"]')
+
+
+ def test_add_key_name():
+ node = xmlsec.create_signature_template()
+ info = xmlsec.ensure_key_info(node)
+ xmlsec.add_key_name(info, b'bob.pem')
+
+ assert node.xpath('.//*[local-name() = "KeyName" and text() = "bob.pem"]')
+
|
Add additional tests for templates.
|
## Code Before:
import xmlsec
def test_create_signature_template():
node = xmlsec.create_signature_template()
assert node.tag.endswith('Signature')
assert node.xpath('*[local-name() = "SignatureValue"]')
assert node.xpath('*[local-name() = "SignedInfo"]')
return node
def test_add_reference():
node = test_create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
assert ref.tag.endswith('Reference')
assert node.xpath('.//*[local-name() = "Reference"]')
## Instruction:
Add additional tests for templates.
## Code After:
import xmlsec
def test_create_signature_template():
node = xmlsec.create_signature_template()
assert node.tag.endswith('Signature')
assert node.xpath('*[local-name() = "SignatureValue"]')
assert node.xpath('*[local-name() = "SignedInfo"]')
def test_add_reference():
node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
assert ref.tag.endswith('Reference')
assert node.xpath('.//*[local-name() = "Reference"]')
def test_add_transform():
node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
xmlsec.add_transform(ref, xmlsec.method.ENVELOPED)
assert ref.xpath('.//*[local-name() = "Transform"]')
def test_ensure_key_info():
node = xmlsec.create_signature_template()
xmlsec.ensure_key_info(node)
assert node.xpath('.//*[local-name() = "KeyInfo"]')
def test_add_x509_data():
node = xmlsec.create_signature_template()
info = xmlsec.ensure_key_info(node)
xmlsec.add_x509_data(info)
assert node.xpath('.//*[local-name() = "X509Data"]')
def test_add_key_name():
node = xmlsec.create_signature_template()
info = xmlsec.ensure_key_info(node)
xmlsec.add_key_name(info, b'bob.pem')
assert node.xpath('.//*[local-name() = "KeyName" and text() = "bob.pem"]')
|
...
...
def test_add_reference():
node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
...
assert node.xpath('.//*[local-name() = "Reference"]')
def test_add_transform():
node = xmlsec.create_signature_template()
ref = xmlsec.add_reference(node, uri=b'#_34275907093489075620748690')
xmlsec.add_transform(ref, xmlsec.method.ENVELOPED)
assert ref.xpath('.//*[local-name() = "Transform"]')
def test_ensure_key_info():
node = xmlsec.create_signature_template()
xmlsec.ensure_key_info(node)
assert node.xpath('.//*[local-name() = "KeyInfo"]')
def test_add_x509_data():
node = xmlsec.create_signature_template()
info = xmlsec.ensure_key_info(node)
xmlsec.add_x509_data(info)
assert node.xpath('.//*[local-name() = "X509Data"]')
def test_add_key_name():
node = xmlsec.create_signature_template()
info = xmlsec.ensure_key_info(node)
xmlsec.add_key_name(info, b'bob.pem')
assert node.xpath('.//*[local-name() = "KeyName" and text() = "bob.pem"]')
...
|
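The namespace-agnostic XPath assertions these tests lean on, demonstrated on a hand-built document (assuming lxml is installed); `local-name()` matches elements regardless of which prefix the XML-DSig namespace is bound to.

from lxml import etree

xml = (b'<Signature xmlns="http://www.w3.org/2000/09/xmldsig#">'
       b'<KeyInfo><KeyName>bob.pem</KeyName></KeyInfo></Signature>')
node = etree.fromstring(xml)

assert node.tag.endswith('Signature')  # tag is '{...namespace...}Signature'
assert node.xpath('.//*[local-name() = "KeyName" and text() = "bob.pem"]')
print("assertions passed")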
a97b557146edfb340ad83fd95838dc2a627ce32f
|
src/urls.py
|
src/urls.py
|
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
|
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
|
Fix urlconf to avoid string view arguments to url()
|
Fix urlconf to avoid string view arguments to url()
|
Python
|
agpl-3.0
|
luac/django-argcache,luac/django-argcache
|
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
- from django.conf.urls import *
+ from django.conf.urls import url
+
+ from .views import view_all, flush
urlpatterns = [
- url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
+ url(r'^view_all/?$', view_all, name='view_all'),
- url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
+ url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
|
Fix urlconf to avoid string view arguments to url()
|
## Code Before:
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
## Instruction:
Fix urlconf to avoid string view arguments to url()
## Code After:
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
|
# ... existing code ...
from django.conf.urls import url
from .views import view_all, flush
# ... modified code ...
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
# ... rest of the code ...
|
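A toy router illustrating why the callable form is safer than dotted-string view names: the URL table holds the function object itself, so a misspelled view fails at import time rather than on the first request. This is an illustration of the idea, not Django's URL resolver.

import re

def view_all(request):
    return 'all caches'

urlpatterns = [
    (r'^view_all/?$', view_all),  # a callable, verified at import time
]

for pattern, view in urlpatterns:
    if re.match(pattern, 'view_all/'):
        print(view('fake-request'))  # -> all caches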
fef4e4ce6b05506babc2c325b08aed77af8b9a3c
|
cryptex/exchange.py
|
cryptex/exchange.py
|
class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associated with the id. Returns
None.
"""
raise NotImplementedError
def buy(self, frm, to, quantity, price):
raise NotImplementedError
def sell(self, frm, to, quantity, price):
raise NotImplementedError
|
class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associated with the id. Returns
None.
"""
raise NotImplementedError
def buy(self, market, quantity, price):
raise NotImplementedError
def sell(self, market, quantity, price):
raise NotImplementedError
|
Modify buy and sell interface to take market tuples
|
Modify buy and sell interface to take market tuples
|
Python
|
mit
|
coink/cryptex
|
class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associated with the id. Returns
None.
"""
raise NotImplementedError
- def buy(self, frm, to, quantity, price):
+ def buy(self, market, quantity, price):
raise NotImplementedError
- def sell(self, frm, to, quantity, price):
+ def sell(self, market, quantity, price):
raise NotImplementedError
|
Modify buy and sell interface to take market tuples
|
## Code Before:
class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associated with the id. Returns
None.
"""
raise NotImplementedError
def buy(self, frm, to, quantity, price):
raise NotImplementedError
def sell(self, frm, to, quantity, price):
raise NotImplementedError
## Instruction:
Modify buy and sell interface to take market tuples
## Code After:
class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associated with the id. Returns
None.
"""
raise NotImplementedError
def buy(self, market, quantity, price):
raise NotImplementedError
def sell(self, market, quantity, price):
raise NotImplementedError
|
// ... existing code ...
def buy(self, market, quantity, price):
raise NotImplementedError
// ... modified code ...
def sell(self, market, quantity, price):
raise NotImplementedError
// ... rest of the code ...
|
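A hedged sketch of a concrete exchange using the new (base, counter) market tuples; DummyExchange keeps orders in an in-memory list instead of calling a real trading API.

class DummyExchange:
    def __init__(self):
        self.orders = []

    def buy(self, market, quantity, price):
        base, counter = market  # e.g. ('BTC', 'USD')
        self.orders.append(('buy', base, counter, quantity, price))
        return len(self.orders) - 1  # toy order id

ex = DummyExchange()
print(ex.buy(('BTC', 'USD'), 0.5, 30000.0))  # -> 0
print(ex.orders)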
3814a07c44c7d97a2ca4aa0f2741a913149d4acd
|
support/update-converity-branch.py
|
support/update-converity-branch.py
|
from __future__ import print_function
import shutil, tempfile
from subprocess import check_call
class Git:
def __init__(self, dir):
self.dir = dir
def __call__(self, *args):
check_call(['git'] + list(args), cwd=self.dir)
dir = tempfile.mkdtemp()
try:
git = Git(dir)
git('clone', '-b', 'coverity', '[email protected]:cppformat/cppformat.git', dir)
git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
git('reset', 'HEAD', '.travis.yml')
git('checkout', '--', '.travis.yml')
git('commit', '-m', 'Update coverity branch')
git('push')
finally:
shutil.rmtree(dir)
|
from __future__ import print_function
import shutil, tempfile
from subprocess import check_output, STDOUT
class Git:
def __init__(self, dir):
self.dir = dir
def __call__(self, *args):
output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
print(output)
return output
dir = tempfile.mkdtemp()
try:
git = Git(dir)
git('clone', '-b', 'coverity', '[email protected]:cppformat/cppformat.git', dir)
output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
if 'Fast-forward' not in output:
git('reset', 'HEAD', '.travis.yml')
git('checkout', '--', '.travis.yml')
git('commit', '-m', 'Update coverity branch')
git('push')
finally:
shutil.rmtree(dir)
|
Handle fast forward in update-coverity-branch.py
|
Handle fast forward in update-coverity-branch.py
|
Python
|
bsd-2-clause
|
lightslife/cppformat,alabuzhev/fmt,mojoBrendan/fmt,dean0x7d/cppformat,Jopie64/cppformat,alabuzhev/fmt,cppformat/cppformat,alabuzhev/fmt,mojoBrendan/fmt,cppformat/cppformat,lightslife/cppformat,cppformat/cppformat,dean0x7d/cppformat,lightslife/cppformat,dean0x7d/cppformat,mojoBrendan/fmt,Jopie64/cppformat,Jopie64/cppformat
|
from __future__ import print_function
import shutil, tempfile
- from subprocess import check_call
+ from subprocess import check_output, STDOUT
class Git:
def __init__(self, dir):
self.dir = dir
def __call__(self, *args):
- check_call(['git'] + list(args), cwd=self.dir)
+ output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
+ print(output)
+ return output
dir = tempfile.mkdtemp()
try:
git = Git(dir)
git('clone', '-b', 'coverity', '[email protected]:cppformat/cppformat.git', dir)
- git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
+ output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
+ if 'Fast-forward' not in output:
- git('reset', 'HEAD', '.travis.yml')
+ git('reset', 'HEAD', '.travis.yml')
- git('checkout', '--', '.travis.yml')
+ git('checkout', '--', '.travis.yml')
- git('commit', '-m', 'Update coverity branch')
+ git('commit', '-m', 'Update coverity branch')
git('push')
finally:
shutil.rmtree(dir)
|
Handle fast forward in update-coverity-branch.py
|
## Code Before:
from __future__ import print_function
import shutil, tempfile
from subprocess import check_call
class Git:
def __init__(self, dir):
self.dir = dir
def __call__(self, *args):
check_call(['git'] + list(args), cwd=self.dir)
dir = tempfile.mkdtemp()
try:
git = Git(dir)
git('clone', '-b', 'coverity', '[email protected]:cppformat/cppformat.git', dir)
git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
git('reset', 'HEAD', '.travis.yml')
git('checkout', '--', '.travis.yml')
git('commit', '-m', 'Update coverity branch')
git('push')
finally:
shutil.rmtree(dir)
## Instruction:
Handle fast forward in update-coverity-branch.py
## Code After:
from __future__ import print_function
import shutil, tempfile
from subprocess import check_output, STDOUT
class Git:
def __init__(self, dir):
self.dir = dir
def __call__(self, *args):
output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
print(output)
return output
dir = tempfile.mkdtemp()
try:
git = Git(dir)
git('clone', '-b', 'coverity', '[email protected]:cppformat/cppformat.git', dir)
output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
if 'Fast-forward' not in output:
git('reset', 'HEAD', '.travis.yml')
git('checkout', '--', '.travis.yml')
git('commit', '-m', 'Update coverity branch')
git('push')
finally:
shutil.rmtree(dir)
|
...
import shutil, tempfile
from subprocess import check_output, STDOUT
...
def __call__(self, *args):
output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
print(output)
return output
...
git('clone', '-b', 'coverity', '[email protected]:cppformat/cppformat.git', dir)
output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
if 'Fast-forward' not in output:
git('reset', 'HEAD', '.travis.yml')
git('checkout', '--', '.travis.yml')
git('commit', '-m', 'Update coverity branch')
git('push')
...
|
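The capture-and-inspect pattern the script switches to, shown on a harmless command. One caveat: on Python 3, check_output returns bytes, so the 'Fast-forward' substring test above only works as written on Python 2; decode first on Python 3.

from subprocess import check_output, STDOUT

output = check_output(['git', '--version'], stderr=STDOUT)
text = output.decode()  # bytes -> str on Python 3
print(text)
if 'git version' in text:
    print('git is available')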
543fc894120db6e8d854e746d631c87cc53f622b
|
website/noveltorpedo/tests.py
|
website/noveltorpedo/tests.py
|
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
import unittest
from django.utils import timezone
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
author = Author()
author.name = "Jack Frost"
author.save()
story = Story()
story.title = "The Big One"
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = "Chapter One"
segment.contents = "This is how it all went down..."
segment.save()
|
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
# Create a new story in the database.
author = Author()
author.name = 'Jack Frost'
author.save()
story = Story()
story.title = 'The Big One'
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = 'Chapter Three'
segment.contents = 'This is how it all went down...'
segment.save()
# Index the new story.
call_command('update_index')
# Query via author name.
response = client.get('/', {'q': 'Jack Frost'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via story name.
response = client.get('/', {'q': 'The Big One'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via segment contents.
response = client.get('/', {'q': 'Chapter Three'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
|
Rebuild index and test variety of queries
|
Rebuild index and test variety of queries
|
Python
|
mit
|
NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo
|
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
- import unittest
from django.utils import timezone
+ from django.core.management import call_command
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
+ # Create a new story in the database.
author = Author()
- author.name = "Jack Frost"
+ author.name = 'Jack Frost'
author.save()
story = Story()
- story.title = "The Big One"
+ story.title = 'The Big One'
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
- segment.title = "Chapter One"
+ segment.title = 'Chapter Three'
- segment.contents = "This is how it all went down..."
+ segment.contents = 'This is how it all went down...'
segment.save()
+
+ # Index the new story.
+ call_command('update_index')
+
+ # Query via author name.
+ response = client.get('/', {'q': 'Jack Frost'})
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, 'Jack Frost')
+ self.assertContains(response, 'The Big One')
+ self.assertContains(response, 'Chapter Three')
+ self.assertContains(response, 'This is how it all went down...')
+
+ # Query via story name.
+ response = client.get('/', {'q': 'The Big One'})
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, 'Jack Frost')
+ self.assertContains(response, 'The Big One')
+ self.assertContains(response, 'Chapter Three')
+ self.assertContains(response, 'This is how it all went down...')
+
+ # Query via segment contents.
+ response = client.get('/', {'q': 'Chapter Three'})
+ self.assertEqual(response.status_code, 200)
+ self.assertContains(response, 'Jack Frost')
+ self.assertContains(response, 'The Big One')
+ self.assertContains(response, 'Chapter Three')
+ self.assertContains(response, 'This is how it all went down...')
+
|
Rebuild index and test variety of queries
|
## Code Before:
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
import unittest
from django.utils import timezone
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
author = Author()
author.name = "Jack Frost"
author.save()
story = Story()
story.title = "The Big One"
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = "Chapter One"
segment.contents = "This is how it all went down..."
segment.save()
## Instruction:
Rebuild index and test variety of queries
## Code After:
from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
# Create a new story in the database.
author = Author()
author.name = 'Jack Frost'
author.save()
story = Story()
story.title = 'The Big One'
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = 'Chapter Three'
segment.contents = 'This is how it all went down...'
segment.save()
# Index the new story.
call_command('update_index')
# Query via author name.
response = client.get('/', {'q': 'Jack Frost'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via story name.
response = client.get('/', {'q': 'The Big One'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via segment contents.
response = client.get('/', {'q': 'Chapter Three'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
|
# ... existing code ...
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command
# ... modified code ...
def test_insertion_and_querying_of_data(self):
# Create a new story in the database.
author = Author()
author.name = 'Jack Frost'
author.save()
...
story = Story()
story.title = 'The Big One'
story.save()
...
segment.story = story
segment.title = 'Chapter Three'
segment.contents = 'This is how it all went down...'
segment.save()
# Index the new story.
call_command('update_index')
# Query via author name.
response = client.get('/', {'q': 'Jack Frost'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via story name.
response = client.get('/', {'q': 'The Big One'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via segment contents.
response = client.get('/', {'q': 'Chapter Three'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# ... rest of the code ...
|
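The call_command('update_index') step is the hinge of the new test: without re-indexing after the writes, the search backend would still serve stale results and every assertion would fail. A minimal, hedged distillation of the pattern follows; the endpoint '/', the query parameter 'q', and the command name come from the test above, while the class and method names are illustrative.

from django.core.management import call_command
from django.test import Client, TestCase

class IndexedSearchTest(TestCase):
    def assert_search_hit(self, query, expected_fragment):
        # Rebuild the search index so freshly saved rows are visible.
        call_command('update_index')
        response = Client().get('/', {'q': query})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, expected_fragment)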
3225c14ed1c3d09a68d6cde8af6d83d54a6f5f76
|
simple_history/__init__.py
|
simple_history/__init__.py
|
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
records_class = records_class or models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
if records_class is None:
records_class = models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
Change style of setting records_class default
|
Change style of setting records_class default
|
Python
|
bsd-3-clause
|
emergence/django-simple-history,luzfcb/django-simple-history,treyhunner/django-simple-history,pombredanne/django-simple-history,pombredanne/django-simple-history,treyhunner/django-simple-history,emergence/django-simple-history,luzfcb/django-simple-history
|
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
+ if records_class is None:
- records_class = records_class or models.HistoricalRecords
+ records_class = models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
Change style of setting records_class default
|
## Code Before:
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
records_class = records_class or models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
## Instruction:
Change style of setting records_class default
## Code After:
from __future__ import unicode_literals
__version__ = '1.5.1'
def register(
model, app=None, manager_name='history', records_class=None,
**records_config):
"""
Create historical model for `model` and attach history manager to `model`.
Keyword arguments:
app -- App to install historical model into (defaults to model.__module__)
manager_name -- class attribute name to use for historical manager
records_class -- class to use for history relation (defaults to
HistoricalRecords)
This method should be used as an alternative to attaching an
`HistoricalManager` instance directly to `model`.
"""
from . import models
if model._meta.db_table not in models.registered_models:
if records_class is None:
records_class = models.HistoricalRecords
records = records_class(**records_config)
records.manager_name = manager_name
records.module = app and ("%s.models" % app) or model.__module__
records.add_extra_methods(model)
records.finalize(model)
models.registered_models[model._meta.db_table] = model
|
# ... existing code ...
if model._meta.db_table not in models.registered_models:
if records_class is None:
records_class = models.HistoricalRecords
records = records_class(**records_config)
# ... rest of the code ...
|
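The style change is not purely cosmetic in the general case: `records_class or default` silently replaces any falsy argument, while the explicit `is None` test only substitutes when the caller passed nothing. Here the arguments are classes, so both spellings behave the same, but the pattern matters elsewhere; a quick illustration with a falsy value:

def pick(value=None):
    if value is None:        # substitutes only when nothing was passed
        value = dict
    return value

assert pick(0) == 0          # falsy argument preserved
assert pick() is dict        # default applied
# By contrast, `value = value or dict` would turn the 0 into dict.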
56396f980236f6d909f63d7faaddd357f5fe235b
|
stock_quant_merge/models/stock.py
|
stock_quant_merge/models/stock.py
|
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
Use browse record instead of ids
|
[MOD] Use browse record instead of ids
|
Python
|
agpl-3.0
|
InakiZabala/odoomrp-wip,Eficent/odoomrp-wip,diagramsoftware/odoomrp-wip,jobiols/odoomrp-wip,Antiun/odoomrp-wip,factorlibre/odoomrp-wip,raycarnes/odoomrp-wip,Daniel-CA/odoomrp-wip-public,esthermm/odoomrp-wip,odoomrp/odoomrp-wip,Daniel-CA/odoomrp-wip-public,odoomrp/odoomrp-wip,oihane/odoomrp-wip,jobiols/odoomrp-wip,odoocn/odoomrp-wip,michaeljohn32/odoomrp-wip,alhashash/odoomrp-wip,esthermm/odoomrp-wip,agaldona/odoomrp-wip-1,Endika/odoomrp-wip,ddico/odoomrp-wip,oihane/odoomrp-wip,jorsea/odoomrp-wip,Eficent/odoomrp-wip,maljac/odoomrp-wip,xpansa/odoomrp-wip,diagramsoftware/odoomrp-wip,sergiocorato/odoomrp-wip,sergiocorato/odoomrp-wip,windedge/odoomrp-wip,agaldona/odoomrp-wip-1,alfredoavanzosc/odoomrp-wip-1,dvitme/odoomrp-wip,slevenhagen/odoomrp-wip-npg,factorlibre/odoomrp-wip,invitu/odoomrp-wip
|
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
- pending_quants_ids = self.ids
+ pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
- if (quant2merge.id in pending_quants_ids and
+ if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
- if quant.id in pending_quants_ids:
- pending_quants_ids.remove(quant.id)
+ pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
Use browse record instead of ids
|
## Code Before:
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants_ids = self.ids
for quant2merge in self:
if (quant2merge.id in pending_quants_ids and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
if quant.id in pending_quants_ids:
pending_quants_ids.remove(quant.id)
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
## Instruction:
Use browse record instead of ids
## Code After:
from openerp import models, api
class StockQuant(models.Model):
_inherit = 'stock.quant'
@api.multi
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
quants = self.search(
[('id', '!=', quant2merge.id),
('product_id', '=', quant2merge.product_id.id),
('lot_id', '=', quant2merge.lot_id.id),
('package_id', '=', quant2merge.package_id.id),
('location_id', '=', quant2merge.location_id.id),
('reservation_id', '=', False),
('propagated_from_id', '=',
quant2merge.propagated_from_id.id)])
for quant in quants:
if (self._get_latest_move(quant2merge) ==
self._get_latest_move(quant)):
quant2merge.qty += quant.qty
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
@api.model
def quants_unreserve(self, move):
quants = move.reserved_quant_ids
super(StockQuant, self).quants_unreserve(move)
quants.merge_stock_quants()
|
...
def merge_stock_quants(self):
pending_quants = self.filtered(lambda x: True)
for quant2merge in self:
if (quant2merge in pending_quants and
not quant2merge.reservation_id):
...
quant2merge.cost += quant.cost
pending_quants -= quant
quant.sudo().unlink()
...
|
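The rewrite relies on Odoo recordset semantics: filtered(lambda x: True) returns a copy of the recordset, and `pending_quants -= quant` drops a record by set difference, replacing the manual id-list bookkeeping. Since that behavior lives in the ORM, the analogy below uses plain Python sets to make the mechanics runnable outside Odoo:

pending = {1, 2, 3}          # stands in for the copied recordset
merged_quant = 2
pending -= {merged_quant}    # set difference, like recordset subtraction
assert pending == {1, 3}
# A separate copy is used so `self`, which is still being iterated, is untouched.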
74dcf36c2eecab290c1c76c947b024e51d280ea7
|
tests/test_rover_init.py
|
tests/test_rover_init.py
|
def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
|
def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
|
Add testing for default grid_* values
|
Add testing for default grid_* values
|
Python
|
mit
|
authentik8/rover
|
def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
+ assert rover.grid_x == 50
+ assert rover.grid_y == 50
+
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
+
+ assert rover.grid_x == 50
+ assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
|
Add testing for default grid_* values
|
## Code Before:
def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
## Instruction:
Add testing for default grid_* values
## Code After:
def test_rover_init_with_default_parameters():
from rover import Rover
rover = Rover()
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
from rover import Rover
rover = Rover(3, 7, 'W')
assert rover.x == 3
assert rover.y == 7
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_custom_grid():
from rover import Rover
rover = Rover(grid_x=100, grid_y=150)
assert rover.x == 0
assert rover.y == 0
assert rover.direction == 'N'
assert rover.grid_x == 100
assert rover.grid_y == 150
def test_rover_init_full_custom_grid():
from rover import Rover
rover = Rover(5, 9, 'E', 100, 150)
assert rover.x == 5
assert rover.y == 9
assert rover.direction == 'E'
assert rover.grid_x == 100
assert rover.grid_y == 150
|
...
assert rover.grid_x == 50
assert rover.grid_y == 50
def test_rover_init_with_custom_parameters():
...
assert rover.direction == 'W'
assert rover.grid_x == 50
assert rover.grid_y == 50
...
|
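Read together, the four tests fully pin down the constructor's contract. The Rover class itself is not part of the diff, so the following is an inferred sketch consistent with the assertions (origin position, facing 'N', a default 50x50 grid), not the project's actual implementation:

class Rover:
    def __init__(self, x=0, y=0, direction='N', grid_x=50, grid_y=50):
        # Defaults inferred from the test expectations above.
        self.x = x
        self.y = y
        self.direction = direction
        self.grid_x = grid_x
        self.grid_y = grid_y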
7b935b23e17ef873a060fdfbefbfdf232fe8b8de
|
git_release/release.py
|
git_release/release.py
|
import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
new_tag = _increment_tag(tag)
git_helpers.tag(signed, new_tag)
|
import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
new_tag = _increment_tag(tag, release_type)
git_helpers.tag(signed, new_tag)
|
Add missing argument to _increment_tag call
|
Add missing argument to _increment_tag call
|
Python
|
mit
|
Authentise/git-release
|
import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
- new_tag = _increment_tag(tag)
+ new_tag = _increment_tag(tag, release_type)
git_helpers.tag(signed, new_tag)
|
Add missing argument to _increment_tag call
|
## Code Before:
import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
new_tag = _increment_tag(tag)
git_helpers.tag(signed, new_tag)
## Instruction:
Add missing argument to _increment_tag call
## Code After:
import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
new_tag = _increment_tag(tag, release_type)
git_helpers.tag(signed, new_tag)
|
# ... existing code ...
new_tag = _increment_tag(tag, release_type)
# ... rest of the code ...
|
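Worth noting about the bug being fixed: Python checks arity only at call time, so the missing argument would pass import and surface as a TypeError the first time release() ran. A minimal reproduction of that failure mode, with a stub body:

def _increment_tag(tag, release_type):
    return tag  # body irrelevant to the point

try:
    _increment_tag('1.0')  # mirrors the pre-fix call site
except TypeError as exc:
    # e.g. "_increment_tag() missing 1 required positional argument: 'release_type'"
    print(exc)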
52c2205804d8dc38447bca1ccbf5599e00cd1d7b
|
main.py
|
main.py
|
import requests
CONFIG_DIR = "config"
class Bot:
def __init__(self):
self.config = Config(CONFIG_DIR)
self.api = TelegramBotApi(self.config.get_auth_token())
def run(self):
self.api.send_message(self.config.get_user_id(), "test")
class TelegramBotApi:
def __init__(self, auth_token):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
def send_message(self, chat_id, text):
self.__send_request("sendMessage", chat_id=chat_id, text=text)
def __send_request(self, command, **params):
requests.get(self.base_url + command, params=params)
class Config:
def __init__(self, config_dir):
self.config_dir = config_dir + "/"
def get_auth_token(self):
return self.__get_config_value("auth_token")
def get_user_id(self):
return self.__get_config_value("user_id")
def __get_config_value(self, config_key):
return open(self.config_dir + config_key).read().strip()
if __name__ == "__main__":
Bot().run()
|
import requests
CONFIG_DIR = "config"
class Bot:
def __init__(self):
self.config = Config(CONFIG_DIR)
self.api = TelegramBotApi(self.config.get_auth_token())
def run(self):
self.api.send_message(self.config.get_admin_user_id(), "test")
class TelegramBotApi:
def __init__(self, auth_token):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
def send_message(self, chat_id, text):
self.__send_request("sendMessage", chat_id=chat_id, text=text)
def __send_request(self, command, **params):
requests.get(self.base_url + command, params=params)
class Config:
def __init__(self, config_dir):
self.config_dir = config_dir + "/"
def get_auth_token(self):
return self.__get_config_value("auth_token")
def get_admin_user_id(self):
return self.__get_config_value("admin_user_id")
def __get_config_value(self, config_key):
return open(self.config_dir + config_key).read().strip()
if __name__ == "__main__":
Bot().run()
|
Rename user_id config key to admin_user_id
|
Rename user_id config key to admin_user_id
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
import requests
CONFIG_DIR = "config"
class Bot:
def __init__(self):
self.config = Config(CONFIG_DIR)
self.api = TelegramBotApi(self.config.get_auth_token())
def run(self):
- self.api.send_message(self.config.get_user_id(), "test")
+ self.api.send_message(self.config.get_admin_user_id(), "test")
class TelegramBotApi:
def __init__(self, auth_token):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
def send_message(self, chat_id, text):
self.__send_request("sendMessage", chat_id=chat_id, text=text)
def __send_request(self, command, **params):
requests.get(self.base_url + command, params=params)
class Config:
def __init__(self, config_dir):
self.config_dir = config_dir + "/"
def get_auth_token(self):
return self.__get_config_value("auth_token")
- def get_user_id(self):
+ def get_admin_user_id(self):
- return self.__get_config_value("user_id")
+ return self.__get_config_value("admin_user_id")
def __get_config_value(self, config_key):
return open(self.config_dir + config_key).read().strip()
if __name__ == "__main__":
Bot().run()
|
Rename user_id config key to admin_user_id
|
## Code Before:
import requests
CONFIG_DIR = "config"
class Bot:
def __init__(self):
self.config = Config(CONFIG_DIR)
self.api = TelegramBotApi(self.config.get_auth_token())
def run(self):
self.api.send_message(self.config.get_user_id(), "test")
class TelegramBotApi:
def __init__(self, auth_token):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
def send_message(self, chat_id, text):
self.__send_request("sendMessage", chat_id=chat_id, text=text)
def __send_request(self, command, **params):
requests.get(self.base_url + command, params=params)
class Config:
def __init__(self, config_dir):
self.config_dir = config_dir + "/"
def get_auth_token(self):
return self.__get_config_value("auth_token")
def get_user_id(self):
return self.__get_config_value("user_id")
def __get_config_value(self, config_key):
return open(self.config_dir + config_key).read().strip()
if __name__ == "__main__":
Bot().run()
## Instruction:
Rename user_id config key to admin_user_id
## Code After:
import requests
CONFIG_DIR = "config"
class Bot:
def __init__(self):
self.config = Config(CONFIG_DIR)
self.api = TelegramBotApi(self.config.get_auth_token())
def run(self):
self.api.send_message(self.config.get_admin_user_id(), "test")
class TelegramBotApi:
def __init__(self, auth_token):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
def send_message(self, chat_id, text):
self.__send_request("sendMessage", chat_id=chat_id, text=text)
def __send_request(self, command, **params):
requests.get(self.base_url + command, params=params)
class Config:
def __init__(self, config_dir):
self.config_dir = config_dir + "/"
def get_auth_token(self):
return self.__get_config_value("auth_token")
def get_admin_user_id(self):
return self.__get_config_value("admin_user_id")
def __get_config_value(self, config_key):
return open(self.config_dir + config_key).read().strip()
if __name__ == "__main__":
Bot().run()
|
# ... existing code ...
def run(self):
self.api.send_message(self.config.get_admin_user_id(), "test")
# ... modified code ...
def get_admin_user_id(self):
return self.__get_config_value("admin_user_id")
# ... rest of the code ...
|
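One thing the rename leaves untouched: __get_config_value opens each config file without closing it, leaning on garbage collection to release the handle. If that reader were ever reworked, a context manager is the idiomatic fix, offered here as a suggestion rather than as part of the commit:

def get_config_value(config_dir, config_key):
    # The `with` block guarantees the file handle is closed promptly.
    with open(config_dir + "/" + config_key) as f:
        return f.read().strip()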