commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | ndiff | instruction | content | diff
---|---|---|---|---|---|---|---|---|---|---|---|---|---
ea5499d36ef84e879737fd8c6d6148dd8305c356
|
bookshelf/search_indexes.py
|
bookshelf/search_indexes.py
|
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
Allow some blank fields in Book model for search
|
Allow some blank fields in Book model for search
|
Python
|
agpl-3.0
|
antoviaque/plin,antoviaque/plin,antoviaque/plin
|
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
- subtitle = indexes.CharField(model_attr='subtitle')
+ subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
- illustrator = indexes.CharField(model_attr='illustrator')
+ illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
- collection = indexes.CharField(model_attr='collection')
+ collection = indexes.CharField(model_attr='collection', blank=True)
- keywords = indexes.CharField(model_attr='keywords')
+ keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
Allow some blank fields in Book model for search
|
## Code Before:
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle')
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator')
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection')
keywords = indexes.CharField(model_attr='keywords')
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
## Instruction:
Allow some blank fields in Book model for search
## Code After:
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
subtitle = indexes.CharField(model_attr='subtitle', blank=True)
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
illustrator = indexes.CharField(model_attr='illustrator', blank=True)
editor = indexes.CharField(model_attr='editor')
collection = indexes.CharField(model_attr='collection', blank=True)
keywords = indexes.CharField(model_attr='keywords', blank=True)
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
from haystack import indexes
from .models import Book
# Classes #####################################################################
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
- subtitle = indexes.CharField(model_attr='subtitle')
+ subtitle = indexes.CharField(model_attr='subtitle', blank=True)
? ++++++++++++
isbn = indexes.CharField(model_attr='isbn')
author = indexes.CharField(model_attr='author')
- illustrator = indexes.CharField(model_attr='illustrator')
+ illustrator = indexes.CharField(model_attr='illustrator', blank=True)
? ++++++++++++
editor = indexes.CharField(model_attr='editor')
- collection = indexes.CharField(model_attr='collection')
+ collection = indexes.CharField(model_attr='collection', blank=True)
? ++++++++++++
- keywords = indexes.CharField(model_attr='keywords')
+ keywords = indexes.CharField(model_attr='keywords', blank=True)
? ++++++++++++
synopsis = indexes.CharField(model_attr='synopsis')
def get_model(self):
return Book
|
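The blank=True flags mirror optional fields on the ORM side. For context, a Book model consistent with this index might look like the sketch below; it is an assumption for illustration, since bookshelf/models.py is not shown in this row.

# Hypothetical Book model matching the index above; illustrative only.
from django.db import models

class Book(models.Model):
    title = models.CharField(max_length=200)
    subtitle = models.CharField(max_length=200, blank=True)     # optional, hence blank=True in the index
    isbn = models.CharField(max_length=13)
    author = models.CharField(max_length=100)
    illustrator = models.CharField(max_length=100, blank=True)  # optional
    editor = models.CharField(max_length=100)
    collection = models.CharField(max_length=100, blank=True)   # optional
    keywords = models.CharField(max_length=200, blank=True)     # optional
    synopsis = models.TextField()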
2c3ddc18477561f4880c2b857c4aa8a0f8478dfd
|
src/psycholinguistic_db/psycholinguistic_db_creator.py
|
src/psycholinguistic_db/psycholinguistic_db_creator.py
|
__author__ = 'Somsubhra Bairi'
__email__ = '[email protected]'
# All imports
from logger import Logger
# The psycholinguistic database creator
class PsycholinguisticDbCreator:
# Constructor for the database creator
def __init__(self, in_file, out_file):
self.in_file = in_file
self.out_file = out_file
# Create the database
def create(self):
Logger.log_message('Creating psycholinguistic dictionary database')
input_file = open(self.in_file, 'r')
output_file = open(self.out_file, 'w')
for line in input_file.readlines():
output_file.write(';'.join(word.lower() for word in line.split()) + '\n')
input_file.close()
output_file.close()
Logger.log_success('Created psycholinguistic dictionary database')
|
__author__ = 'Somsubhra Bairi'
__email__ = '[email protected]'
# All imports
from logger import Logger
from nltk import PorterStemmer
# The psycholinguistic database creator
class PsycholinguisticDbCreator:
# Constructor for the database creator
def __init__(self, in_file, out_file):
self.in_file = in_file
self.out_file = out_file
self.kf_frequencies = {}
self.syllables = {}
# Create the database
def create(self):
Logger.log_message('Creating psycholinguistic dictionary database')
input_file = open(self.in_file, 'r')
output_file = open(self.out_file, 'w')
for line in input_file.readlines():
items = line.split()
word = PorterStemmer().stem_word(items[2].lower())
kff = items[1]
syl = items[0]
if word in self.kf_frequencies:
# Select the stemmed word with the maximum KF Frequency
if kff > self.kf_frequencies[word]:
self.kf_frequencies[word] = kff
else:
self.kf_frequencies[word] = kff
if word in self.syllables:
# Select the stemmed word with minimum number of syllables
if syl < self.syllables[word]:
self.syllables[word] = syl
else:
self.syllables[word] = syl
# Dump the contents to the output file
for word in self.kf_frequencies:
output_file.write(word + ";" + self.kf_frequencies[word] + ";" + self.syllables[word] + "\n")
input_file.close()
output_file.close()
Logger.log_success('Created psycholinguistic dictionary database')
|
Create the psycholinguistic_db according to our needs
|
Create the psycholinguistic_db according to our needs
|
Python
|
mit
|
Somsubhra/Enrich,Somsubhra/Enrich,Somsubhra/Enrich
|
__author__ = 'Somsubhra Bairi'
__email__ = '[email protected]'
# All imports
from logger import Logger
+
+ from nltk import PorterStemmer
# The psycholinguistic database creator
class PsycholinguisticDbCreator:
# Constructor for the database creator
def __init__(self, in_file, out_file):
self.in_file = in_file
self.out_file = out_file
+ self.kf_frequencies = {}
+ self.syllables = {}
# Create the database
def create(self):
Logger.log_message('Creating psycholinguistic dictionary database')
input_file = open(self.in_file, 'r')
output_file = open(self.out_file, 'w')
for line in input_file.readlines():
- output_file.write(';'.join(word.lower() for word in line.split()) + '\n')
+ items = line.split()
+ word = PorterStemmer().stem_word(items[2].lower())
+ kff = items[1]
+ syl = items[0]
+
+ if word in self.kf_frequencies:
+ # Select the stemmed word with the maximum KF Frequency
+ if kff > self.kf_frequencies[word]:
+ self.kf_frequencies[word] = kff
+ else:
+ self.kf_frequencies[word] = kff
+
+ if word in self.syllables:
+ # Select the stemmed word with minimum number of syllables
+ if syl < self.syllables[word]:
+ self.syllables[word] = syl
+ else:
+ self.syllables[word] = syl
+
+ # Dump the contents to the output file
+ for word in self.kf_frequencies:
+ output_file.write(word + ";" + self.kf_frequencies[word] + ";" + self.syllables[word] + "\n")
input_file.close()
output_file.close()
Logger.log_success('Created psycholinguistic dictionary database')
|
Create the psycholinguistic_db according to our needs
|
## Code Before:
__author__ = 'Somsubhra Bairi'
__email__ = '[email protected]'
# All imports
from logger import Logger
# The psycholinguistic database creator
class PsycholinguisticDbCreator:
# Constructor for the database creator
def __init__(self, in_file, out_file):
self.in_file = in_file
self.out_file = out_file
# Create the database
def create(self):
Logger.log_message('Creating psycholinguistic dictionary database')
input_file = open(self.in_file, 'r')
output_file = open(self.out_file, 'w')
for line in input_file.readlines():
output_file.write(';'.join(word.lower() for word in line.split()) + '\n')
input_file.close()
output_file.close()
Logger.log_success('Created psycholinguistic dictionary database')
## Instruction:
Create the psycholinguistic_db according to our needs
## Code After:
__author__ = 'Somsubhra Bairi'
__email__ = '[email protected]'
# All imports
from logger import Logger
from nltk import PorterStemmer
# The psycholinguistic database creator
class PsycholinguisticDbCreator:
# Constructor for the database creator
def __init__(self, in_file, out_file):
self.in_file = in_file
self.out_file = out_file
self.kf_frequencies = {}
self.syllables = {}
# Create the database
def create(self):
Logger.log_message('Creating psycholinguistic dictionary database')
input_file = open(self.in_file, 'r')
output_file = open(self.out_file, 'w')
for line in input_file.readlines():
items = line.split()
word = PorterStemmer().stem_word(items[2].lower())
kff = items[1]
syl = items[0]
if word in self.kf_frequencies:
# Select the stemmed word with the maximum KF Frequency
if kff > self.kf_frequencies[word]:
self.kf_frequencies[word] = kff
else:
self.kf_frequencies[word] = kff
if word in self.syllables:
# Select the stemmed word with minimum number of syllables
if syl < self.syllables[word]:
self.syllables[word] = syl
else:
self.syllables[word] = syl
# Dump the contents to the output file
for word in self.kf_frequencies:
output_file.write(word + ";" + self.kf_frequencies[word] + ";" + self.syllables[word] + "\n")
input_file.close()
output_file.close()
Logger.log_success('Created psycholinguistic dictionary database')
|
__author__ = 'Somsubhra Bairi'
__email__ = '[email protected]'
# All imports
from logger import Logger
+
+ from nltk import PorterStemmer
# The psycholinguistic database creator
class PsycholinguisticDbCreator:
# Constructor for the database creator
def __init__(self, in_file, out_file):
self.in_file = in_file
self.out_file = out_file
+ self.kf_frequencies = {}
+ self.syllables = {}
# Create the database
def create(self):
Logger.log_message('Creating psycholinguistic dictionary database')
input_file = open(self.in_file, 'r')
output_file = open(self.out_file, 'w')
for line in input_file.readlines():
- output_file.write(';'.join(word.lower() for word in line.split()) + '\n')
+ items = line.split()
+ word = PorterStemmer().stem_word(items[2].lower())
+ kff = items[1]
+ syl = items[0]
+
+ if word in self.kf_frequencies:
+ # Select the stemmed word with the maximum KF Frequency
+ if kff > self.kf_frequencies[word]:
+ self.kf_frequencies[word] = kff
+ else:
+ self.kf_frequencies[word] = kff
+
+ if word in self.syllables:
+ # Select the stemmed word with minimum number of syllables
+ if syl < self.syllables[word]:
+ self.syllables[word] = syl
+ else:
+ self.syllables[word] = syl
+
+ # Dump the contents to the output file
+ for word in self.kf_frequencies:
+ output_file.write(word + ";" + self.kf_frequencies[word] + ";" + self.syllables[word] + "\n")
input_file.close()
output_file.close()
Logger.log_success('Created psycholinguistic dictionary database')
|
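One caveat in the new create(): line.split() yields strings, so the kff and syl comparisons above are lexicographic ('9' > '10' is True). If the columns are numeric, a sketch of the intended ordering (an assumption about the input format):

# Sketch: numeric comparison, same max-KF / min-syllable selection,
# meant to slot into the body of create().
kff = int(items[1])
syl = int(items[0])
if word not in self.kf_frequencies or kff > self.kf_frequencies[word]:
    self.kf_frequencies[word] = kff   # keep the maximum KF frequency
if word not in self.syllables or syl < self.syllables[word]:
    self.syllables[word] = syl        # keep the minimum syllable count
# str() casts would then be needed when writing the output line.

Note also that PorterStemmer.stem_word is NLTK 2.x API; NLTK 3.x renamed it to stem.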
6e17e781f6cb8e29a7284beffe10463c843b86b3
|
tests/test_vector2_equality.py
|
tests/test_vector2_equality.py
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
Add another (generated) negative example
|
tests/equality: Add another (generated) negative example
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
+ assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
Add another (generated) negative example
|
## Code Before:
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
## Instruction:
Add another (generated) negative example
## Code After:
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
from hypothesis import assume, given
from ppb_vector import Vector2
from utils import vectors
@given(x=vectors())
def test_equal_self(x: Vector2):
assert x == x
@given(x=vectors())
def test_non_zero_equal(x: Vector2):
assume(x != (0, 0))
assert x != 1.1 * x
+ assert x != -x
@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
assert (x != y) == (not x == y)
|
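The tests rely on a vectors() strategy from a local utils module that this row does not show; a plausible minimal version is sketched below (an assumption, not the project's actual helper). Note that the assume(x != (0, 0)) guard protects both assertions, since 1.1 * 0 == 0 and -0 == 0.

# Hypothetical vectors() strategy; the real one lives in tests/utils.py.
from hypothesis import strategies as st
from ppb_vector import Vector2

def vectors(max_magnitude=1e75):
    finite = st.floats(min_value=-max_magnitude, max_value=max_magnitude,
                       allow_nan=False, allow_infinity=False)
    return st.builds(Vector2, finite, finite)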
06349ea257219e8ad1808fa4fd77f34f7371894a
|
test/test.py
|
test/test.py
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
Remove dead code, os.path.mkdir does not even exist
|
Remove dead code, os.path.mkdir does not even exist
|
Python
|
bsd-3-clause
|
davvo/mbutil-eniro,mapbox/mbutil,mapbox/mbutil
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
- except Exception: pass
-
- try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
Remove dead code, os.path.mkdir does not even exist
|
## Code Before:
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
## Instruction:
Remove dead code, os.path.mkdir does not even exist
## Code After:
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
import os, shutil
from nose import with_setup
from mbutil import mbtiles_to_disk, disk_to_mbtiles
def clear_data():
try: shutil.rmtree('test/output')
- except Exception: pass
-
- try: os.path.mkdir('test/output')
except Exception: pass
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
@with_setup(clear_data, clear_data)
def test_mbtiles_to_disk_and_back():
mbtiles_to_disk('test/data/one_tile.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.png')
disk_to_mbtiles('test/output/', 'test/output/one.mbtiles')
assert os.path.exists('test/output/one.mbtiles')
@with_setup(clear_data, clear_data)
def test_utf8grid_mbtiles_to_disk():
mbtiles_to_disk('test/data/utf8grid.mbtiles', 'test/output')
assert os.path.exists('test/output/0/0/0.grid.json')
assert os.path.exists('test/output/0/0/0.png')
assert os.path.exists('test/output/metadata.json')
|
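The deleted branch was doubly dead: os.path.mkdir has never existed (the real functions are os.mkdir and os.makedirs), so the try block always swallowed an AttributeError, and the tests still pass, presumably because mbtiles_to_disk creates the output directory itself. Had the recreation been intended, a working spelling would be:

# Sketch of what the dead branch was presumably reaching for:
import os, shutil

def clear_data():
    shutil.rmtree('test/output', ignore_errors=True)
    os.makedirs('test/output', exist_ok=True)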
d676a1b1e7e3efbbfc72f1d7e522865b623783df
|
utils/etc.py
|
utils/etc.py
|
def reverse_insort(seq, val):
lo = 0
hi = len(seq)
while lo < hi:
mid = (lo + hi) // 2
if val > seq[mid]:
hi = mid
else:
lo = mid + 1
seq.insert(lo, val)
|
def reverse_insort(seq, val, lo=0, hi=None):
if hi is None:
hi = len(seq)
while lo < hi:
mid = (lo + hi) // 2
if val > seq[mid]:
hi = mid
else:
lo = mid + 1
seq.insert(lo, val)
|
Add optional hi and lo params to reverse_insort
|
Add optional hi and lo params to reverse_insort
|
Python
|
mit
|
BeatButton/beattie,BeatButton/beattie-bot
|
- def reverse_insort(seq, val):
+ def reverse_insort(seq, val, lo=0, hi=None):
- lo = 0
+ if hi is None:
- hi = len(seq)
+ hi = len(seq)
while lo < hi:
mid = (lo + hi) // 2
if val > seq[mid]:
hi = mid
else:
lo = mid + 1
seq.insert(lo, val)
|
Add optional hi and lo params to reverse_insort
|
## Code Before:
def reverse_insort(seq, val):
lo = 0
hi = len(seq)
while lo < hi:
mid = (lo + hi) // 2
if val > seq[mid]:
hi = mid
else:
lo = mid + 1
seq.insert(lo, val)
## Instruction:
Add optional hi and lo params to reverse_insort
## Code After:
def reverse_insort(seq, val, lo=0, hi=None):
if hi is None:
hi = len(seq)
while lo < hi:
mid = (lo + hi) // 2
if val > seq[mid]:
hi = mid
else:
lo = mid + 1
seq.insert(lo, val)
|
- def reverse_insort(seq, val):
+ def reverse_insort(seq, val, lo=0, hi=None):
? +++++++++++++++
- lo = 0
+ if hi is None:
- hi = len(seq)
+ hi = len(seq)
? ++++
while lo < hi:
mid = (lo + hi) // 2
if val > seq[mid]:
hi = mid
else:
lo = mid + 1
seq.insert(lo, val)
|
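A quick usage sketch: reverse_insort keeps a descending list sorted as items arrive, mirroring bisect.insort for ascending order, and the new lo/hi parameters bound the search window the same way bisect's do.

scores = [9, 7, 4]                 # maintained in descending order
reverse_insort(scores, 8)
assert scores == [9, 8, 7, 4]
reverse_insort(scores, 8, lo=1)    # search only from index 1 onward
assert scores == [9, 8, 8, 7, 4]   # equal items are inserted after existing ones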
3ccdd5e6c52b9c46f9245df647b7b9703424eb74
|
pyramda/iterable/reject_test.py
|
pyramda/iterable/reject_test.py
|
from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
|
from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
|
Add test to ensure reject does not remove duplicates
|
Add test to ensure reject does not remove duplicates
|
Python
|
mit
|
jackfirth/pyramda
|
from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
+
+ def test_reject_does_not_remove_duplicates():
+ assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
+
+
+ def test_curry_reject_does_not_remove_duplicates():
+ assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
+
|
Add test to ensure reject does not remove duplicates
|
## Code Before:
from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
## Instruction:
Add test to ensure reject does not remove duplicates
## Code After:
from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
def test_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
def test_curry_reject_does_not_remove_duplicates():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
|
from . import reject
def test_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4]) == [2, 4]
def test_curry_reject_filters_out_unwanted_items_in_iterable():
assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4]) == [2, 4]
+
+
+ def test_reject_does_not_remove_duplicates():
+ assert reject(lambda x: x % 2 == 1, [1, 2, 3, 4, 4]) == [2, 4, 4]
+
+
+ def test_curry_reject_does_not_remove_duplicates():
+ assert reject(lambda x: x % 2 == 1)([1, 2, 3, 4, 4]) == [2, 4, 4]
|
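For reference, a reject that satisfies both call styles in these tests can be written with manual currying; this is a sketch, not pyramda's actual implementation (which uses its own curry decorator).

# Hypothetical stand-in for pyramda.iterable.reject.
from functools import partial

def reject(p, xs=None):
    if xs is None:                       # curried call: reject(p)(xs)
        return partial(reject, p)
    return [x for x in xs if not p(x)]   # preserves order and duplicates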
24d3f19984e4bfa1ad38faf700ae53f5f4ac10bd
|
jay/urls.py
|
jay/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
from . import demo_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', TemplateView.as_view(template_name="base/base.html")),
url(r'^demo/', include(demo_urls)),
url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
from . import demo_urls
from votes import urls as votes_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', TemplateView.as_view(template_name="base/base.html")),
url(r'^demo/', include(demo_urls)),
url(r'^(?P<system_name>[\w-]+)/', include(votes_urls)),
url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
]
|
Add votes URL scheme to main URL scheme
|
Add votes URL scheme to main URL scheme
|
Python
|
mit
|
OpenJUB/jay,kuboschek/jay,OpenJUB/jay,OpenJUB/jay,kuboschek/jay,kuboschek/jay
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
from . import demo_urls
+ from votes import urls as votes_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', TemplateView.as_view(template_name="base/base.html")),
url(r'^demo/', include(demo_urls)),
+ url(r'^(?P<system_name>[\w-]+)/', include(votes_urls)),
url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
]
|
Add votes URL scheme to main URL scheme
|
## Code Before:
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
from . import demo_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', TemplateView.as_view(template_name="base/base.html")),
url(r'^demo/', include(demo_urls)),
url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
]
## Instruction:
Add votes URL scheme to main URL scheme
## Code After:
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
from . import demo_urls
from votes import urls as votes_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', TemplateView.as_view(template_name="base/base.html")),
url(r'^demo/', include(demo_urls)),
url(r'^(?P<system_name>[\w-]+)/', include(votes_urls)),
url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic import TemplateView
from . import demo_urls
+ from votes import urls as votes_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', TemplateView.as_view(template_name="base/base.html")),
url(r'^demo/', include(demo_urls)),
+ url(r'^(?P<system_name>[\w-]+)/', include(votes_urls)),
url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
]
|
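Ordering is the subtle point in this change: the catch-all ^(?P<system_name>[\w-]+)/ also matches /login/ and /logout/, and Django tries patterns top to bottom, so if votes_urls resolves the empty remainder those two routes are shadowed. A safer ordering would place the catch-all last (an editorial sketch, not from the repo):

urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^$', TemplateView.as_view(template_name="base/base.html")),
    url(r'^demo/', include(demo_urls)),
    url(r'^login/', auth_views.login, {'template_name': 'auth/login.html'}),
    url(r'^logout/', auth_views.logout, {'template_name': 'auth/logout.html'}),
    # catch-all last, so the fixed routes above always win
    url(r'^(?P<system_name>[\w-]+)/', include(votes_urls)),
]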
73b6a84cfc0ccc20d04c3dd80c3e505cd118be4d
|
nsfw.py
|
nsfw.py
|
import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['gel'])
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
|
import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['gel'], hidden=True)
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
|
Make command invisible by default
|
Make command invisible by default
|
Python
|
mit
|
BeatButton/beattie-bot,BeatButton/beattie
|
import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
- @commands.command(aliases=['gel'])
+ @commands.command(aliases=['gel'], hidden=True)
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
|
Make command invisible by default
|
## Code Before:
import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['gel'])
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
## Instruction:
Make command invisible by default
## Code After:
import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['gel'], hidden=True)
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
|
import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
- @commands.command(aliases=['gel'])
+ @commands.command(aliases=['gel'], hidden=True)
? +++++++++++++
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
|
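In discord.py, hidden=True only drops a command from the default help listing; the command remains invocable by name. A minimal illustration:

# Hidden commands stay callable, they are just not advertised in the help output.
@commands.command(hidden=True)
async def secret(ctx):
    await ctx.send('still callable, just not listed')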
26e0d89e5178fb05b95f56cbef58ac37bfa6f1d9
|
camera_opencv.py
|
camera_opencv.py
|
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
|
import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
|
Use OPENCV_CAMERA_SOURCE environment variable to set source
|
Use OPENCV_CAMERA_SOURCE environment variable to set source
|
Python
|
mit
|
miguelgrinberg/flask-video-streaming,miguelgrinberg/flask-video-streaming
|
+ import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
+
+ def __init__(self):
+ if os.environ.get('OPENCV_CAMERA_SOURCE'):
+ Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
+ super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
|
Use OPENCV_CAMERA_SOURCE environment variable to set source
|
## Code Before:
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
## Instruction:
Use OPENCV_CAMERA_SOURCE environment variable to set source
## Code After:
import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
|
+ import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
+
+ def __init__(self):
+ if os.environ.get('OPENCV_CAMERA_SOURCE'):
+ Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
+ super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
|
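With this change the capture device is selectable per environment, e.g. OPENCV_CAMERA_SOURCE=1 python app.py to use the second attached camera (app.py being the repo's assumed entry point). Note the int() cast: only numeric device indices work; pointing the variable at a video file would require dropping the cast.

import os

# Equivalent in-process selection; must happen before the first Camera()
# is constructed, since __init__ reads the variable.
os.environ['OPENCV_CAMERA_SOURCE'] = '1'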
bc021f416530375066c67c117995bd44c2bac7d5
|
timezone_field/__init__.py
|
timezone_field/__init__.py
|
from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
|
__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
Add an __all__ designator to top-level
|
Add an __all__ designator to top-level
|
Python
|
bsd-2-clause
|
mfogel/django-timezone-field
|
- from .fields import TimeZoneField # noqa
- from .forms import TimeZoneFormField # noqa
+ __version__ = '1.0'
+ __all__ = ['TimeZoneField', 'TimeZoneFormField']
- __version__ = '1.0'
+ from timezone_field.fields import TimeZoneField
+ from timezone_field.forms import TimeZoneFormField
|
Add an __all__ designator to top-level
|
## Code Before:
from .fields import TimeZoneField # noqa
from .forms import TimeZoneFormField # noqa
__version__ = '1.0'
## Instruction:
Add an __all__ designator to top-level
## Code After:
__version__ = '1.0'
__all__ = ['TimeZoneField', 'TimeZoneFormField']
from timezone_field.fields import TimeZoneField
from timezone_field.forms import TimeZoneFormField
|
- from .fields import TimeZoneField # noqa
- from .forms import TimeZoneFormField # noqa
+ __version__ = '1.0'
+ __all__ = ['TimeZoneField', 'TimeZoneFormField']
- __version__ = '1.0'
+ from timezone_field.fields import TimeZoneField
+ from timezone_field.forms import TimeZoneFormField
|
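__all__ holds strings, not objects, so listing the names before importing them is fine; its effect is to pin what a star-import exposes. A quick check sketch:

import timezone_field
assert set(timezone_field.__all__) == {'TimeZoneField', 'TimeZoneFormField'}
from timezone_field import *   # now brings in exactly those two names
# __version__ stays reachable as timezone_field.__version__ but is not starred.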
39fbce2a0e225591423f9b2d1edd111822063466
|
app/core/api.py
|
app/core/api.py
|
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
return jsonify({'Success': True, 'ipAddress': get_client_ip()})
def get_client_ip():
return request.headers.get('X-Forwarded-For') or request.remote_addr
|
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
"""Return client IP"""
return api_reply({'ipAddress': get_client_ip()})
def get_client_ip():
"""Return the client x-forwarded-for header or IP address"""
return request.headers.get('X-Forwarded-For') or request.remote_addr
def api_reply(body={}, success=True):
"""Create a standard API reply interface"""
return jsonify({**body, 'success': success})
|
Add a standard API reply interface
|
Add a standard API reply interface
|
Python
|
mit
|
jniedrauer/jniedrauer.com,jniedrauer/jniedrauer.com,jniedrauer/jniedrauer.com
|
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
+ """Return client IP"""
- return jsonify({'Success': True, 'ipAddress': get_client_ip()})
+ return api_reply({'ipAddress': get_client_ip()})
def get_client_ip():
+ """Return the client x-forwarded-for header or IP address"""
return request.headers.get('X-Forwarded-For') or request.remote_addr
+
+ def api_reply(body={}, success=True):
+ """Create a standard API reply interface"""
+ return jsonify({**body, 'success': success})
+
|
Add a standard API reply interface
|
## Code Before:
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
return jsonify({'Success': True, 'ipAddress': get_client_ip()})
def get_client_ip():
return request.headers.get('X-Forwarded-For') or request.remote_addr
## Instruction:
Add a standard API reply interface
## Code After:
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
"""Return client IP"""
return api_reply({'ipAddress': get_client_ip()})
def get_client_ip():
"""Return the client x-forwarded-for header or IP address"""
return request.headers.get('X-Forwarded-For') or request.remote_addr
def api_reply(body={}, success=True):
"""Create a standard API reply interface"""
return jsonify({**body, 'success': success})
|
from flask import jsonify, request
from ..main import app
@app.route('/api/ip')
def api_ip():
+ """Return client IP"""
- return jsonify({'Success': True, 'ipAddress': get_client_ip()})
? ^^^^ ^ -----------------
+ return api_reply({'ipAddress': get_client_ip()})
? ^^ ^^^^^
def get_client_ip():
+ """Return the client x-forwarded-for header or IP address"""
return request.headers.get('X-Forwarded-For') or request.remote_addr
+
+
+ def api_reply(body={}, success=True):
+ """Create a standard API reply interface"""
+ return jsonify({**body, 'success': success})
|
9387dfd4cc39fa6fbbf66147ced880dffa6408bd
|
keystone/server/flask/__init__.py
|
keystone/server/flask/__init__.py
|
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
'setup_app_middleware')
fail_gracefully = application.fail_gracefully
|
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
|
Make keystone.server.flask more interesting for importing
|
Make keystone.server.flask more interesting for importing
Importing keystone.server.flask now exposes all the relevant bits
from the sub modules to develop APIs without needing to understand
all the underlying modules. __all__ has also been set up in a meaningful
way to allow for `from keystone.server.flask import *` and have
all the needed objects to start developing APIs for keystone.
Change-Id: Iab22cabb71c6690e6ffb0c9de68ed8437c4848de
Partial-Bug: #1776504
|
Python
|
apache-2.0
|
openstack/keystone,openstack/keystone,mahak/keystone,openstack/keystone,mahak/keystone,mahak/keystone
|
+ from keystone.server.flask import application
+ from keystone.server.flask.common import APIBase # noqa
+ from keystone.server.flask.common import base_url # noqa
+ from keystone.server.flask.common import construct_json_home_data # noqa
+ from keystone.server.flask.common import construct_resource_map # noqa
+ from keystone.server.flask.common import full_url # noqa
+ from keystone.server.flask.common import JsonHomeData # noqa
+ from keystone.server.flask.common import ResourceBase # noqa
+ from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
- from keystone.server.flask import application # noqa
- __all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
- 'setup_app_middleware')
+ # NOTE(morgan): This allows for from keystone.flask import * and have all the
+ # cool stuff needed to develop new APIs within a module/subsystem
+ __all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
+ 'base_url', 'construct_json_home_data', 'construct_resource_map',
+ 'full_url', 'fail_gracefully')
+ application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
|
Make keystone.server.flask more interesting for importing
|
## Code Before:
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
'setup_app_middleware')
fail_gracefully = application.fail_gracefully
## Instruction:
Make keystone.server.flask more interesting for importing
## Code After:
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
|
+ from keystone.server.flask import application
+ from keystone.server.flask.common import APIBase # noqa
+ from keystone.server.flask.common import base_url # noqa
+ from keystone.server.flask.common import construct_json_home_data # noqa
+ from keystone.server.flask.common import construct_resource_map # noqa
+ from keystone.server.flask.common import full_url # noqa
+ from keystone.server.flask.common import JsonHomeData # noqa
+ from keystone.server.flask.common import ResourceBase # noqa
+ from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
- from keystone.server.flask import application # noqa
- __all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
- 'setup_app_middleware')
+ # NOTE(morgan): This allows for from keystone.flask import * and have all the
+ # cool stuff needed to develop new APIs within a module/subsystem
+ __all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
+ 'base_url', 'construct_json_home_data', 'construct_resource_map',
+ 'full_url', 'fail_gracefully')
+ application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
|
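The practical effect is that a new API subsystem can pull everything it needs from one import; the names below are exactly those re-exported above, only the grouping is illustrative.

# One-stop import for developing a new keystone API:
from keystone.server.flask import (
    APIBase, JsonHomeData, ResourceBase, ResourceMap,
    base_url, construct_json_home_data, construct_resource_map,
    full_url, fail_gracefully,
)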
6c67d06a691be8a930c0e82fcf404057580645d8
|
tests/conftest.py
|
tests/conftest.py
|
import os
import sys
from pathlib import Path
import pytest
if sys.version_info < (3, 4):
print("Requires Python 3.4+")
sys.exit(1)
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(TESTS_ROOT)
@pytest.fixture
def resources():
return Path(TESTS_ROOT) / 'resources'
@pytest.fixture(scope="function")
def outdir(tmp_path):
return tmp_path
@pytest.fixture(scope="function")
def outpdf(tmp_path):
return tmp_path / 'out.pdf'
|
import os
import sys
from pathlib import Path
import pytest
if sys.version_info < (3, 4):
print("Requires Python 3.4+")
sys.exit(1)
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(TESTS_ROOT)
@pytest.fixture(scope="session")
def resources():
return Path(TESTS_ROOT) / 'resources'
@pytest.fixture(scope="function")
def outdir(tmp_path):
return tmp_path
@pytest.fixture(scope="function")
def outpdf(tmp_path):
return tmp_path / 'out.pdf'
|
Fix warning from hypothesis about scope of resources() fixture
|
Fix warning from hypothesis about scope of resources() fixture
|
Python
|
mpl-2.0
|
pikepdf/pikepdf,pikepdf/pikepdf,pikepdf/pikepdf
|
import os
import sys
from pathlib import Path
import pytest
-
if sys.version_info < (3, 4):
print("Requires Python 3.4+")
sys.exit(1)
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(TESTS_ROOT)
- @pytest.fixture
+ @pytest.fixture(scope="session")
def resources():
return Path(TESTS_ROOT) / 'resources'
@pytest.fixture(scope="function")
def outdir(tmp_path):
return tmp_path
@pytest.fixture(scope="function")
def outpdf(tmp_path):
return tmp_path / 'out.pdf'
|
Fix warning from hypothesis about scope of resources() fixture
|
## Code Before:
import os
import sys
from pathlib import Path
import pytest
if sys.version_info < (3, 4):
print("Requires Python 3.4+")
sys.exit(1)
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(TESTS_ROOT)
@pytest.fixture
def resources():
return Path(TESTS_ROOT) / 'resources'
@pytest.fixture(scope="function")
def outdir(tmp_path):
return tmp_path
@pytest.fixture(scope="function")
def outpdf(tmp_path):
return tmp_path / 'out.pdf'
## Instruction:
Fix warning from hypothesis about scope of resources() fixture
## Code After:
import os
import sys
from pathlib import Path
import pytest
if sys.version_info < (3, 4):
print("Requires Python 3.4+")
sys.exit(1)
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(TESTS_ROOT)
@pytest.fixture(scope="session")
def resources():
return Path(TESTS_ROOT) / 'resources'
@pytest.fixture(scope="function")
def outdir(tmp_path):
return tmp_path
@pytest.fixture(scope="function")
def outpdf(tmp_path):
return tmp_path / 'out.pdf'
|
import os
import sys
from pathlib import Path
import pytest
-
if sys.version_info < (3, 4):
print("Requires Python 3.4+")
sys.exit(1)
TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.dirname(TESTS_ROOT)
- @pytest.fixture
+ @pytest.fixture(scope="session")
def resources():
return Path(TESTS_ROOT) / 'resources'
@pytest.fixture(scope="function")
def outdir(tmp_path):
return tmp_path
@pytest.fixture(scope="function")
def outpdf(tmp_path):
return tmp_path / 'out.pdf'
|
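Hypothesis warns about function-scoped fixtures used from @given tests because the fixture runs once per test function, not once per generated example; a read-only Path is safe to promote to session scope, which also silences the warning. The shape of a test that triggers it (illustrative, not from the suite):

from hypothesis import given, strategies as st

@given(n=st.integers(min_value=0, max_value=3))
def test_uses_resources(resources, n):   # hypothetical test
    # `resources` is now built once per session; fine, since it is never mutated.
    assert (resources / 'sample{}.pdf'.format(n)).suffix == '.pdf'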
67e47e0179352e9d5206fe7196762481d0bcaba4
|
aspen/server.py
|
aspen/server.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
algorithm.run(argv=self.argv, _through='get_website_from_argv')
return algorithm.state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
Drop back to state as return val
|
Drop back to state as return val
If we store state on Algorithm then we're not thread-safe.
|
Python
|
mit
|
gratipay/aspen.py,gratipay/aspen.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
- algorithm.run(argv=self.argv, _through='get_website_from_argv')
+ state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
- return algorithm.state['website']
+ return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
Drop back to state as return val
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
algorithm.run(argv=self.argv, _through='get_website_from_argv')
return algorithm.state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
## Instruction:
Drop back to state as return val
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from algorithm import Algorithm
def main():
Server().main()
class Server(object):
def __init__(self, argv=None):
self.argv = argv
def get_algorithm(self):
return Algorithm('aspen.algorithms.server')
def get_website(self):
"""Return a website object. Useful in testing.
"""
algorithm = self.get_algorithm()
- algorithm.run(argv=self.argv, _through='get_website_from_argv')
+ state = algorithm.run(argv=self.argv, _through='get_website_from_argv')
? ++++++++
- return algorithm.state['website']
? ----------
+ return state['website']
def main(self, argv=None):
"""http://aspen.io/cli/
"""
try:
argv = argv if argv is not None else self.argv
algorithm = self.get_algorithm()
algorithm.run(argv=argv)
except (SystemExit, KeyboardInterrupt):
# Under some (most?) network engines, a SIGINT will be trapped by the
# SIGINT signal handler above. However, gevent does "something" with
# signals and our signal handler never fires. However, we *do* get a
# KeyboardInterrupt here in that case. *shrug*
#
# See: https://github.com/gittip/aspen-python/issues/196
pass
except:
import aspen, traceback
aspen.log_dammit("Oh no! Aspen crashed!")
aspen.log_dammit(traceback.format_exc())
if __name__ == '__main__':
main()
|
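The one-line rationale in the message ("If we store state on Algorithm then we're not thread-safe") is worth unpacking: a result stored on a shared object can be overwritten by a concurrent run before the first caller reads it back, while a returned value lives on each caller's own frame. A self-contained sketch of the distinction, with illustrative names only:

import threading

class StoresState:
    def run(self, value):
        self.state = value * 2   # shared attribute: last writer wins

class ReturnsState:
    def run(self, value):
        return value * 2         # each caller keeps its own result

safe = ReturnsState()
results = {}

def worker(n):
    results[n] = safe.run(n)     # no shared mutable attribute involved

threads = [threading.Thread(target=worker, args=(n,)) for n in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert results == {0: 0, 1: 2, 2: 4, 3: 6}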
8b545ee63ec695a77ba08fa5ff45b7d6dd3d94f8
|
cuteshop/downloaders/git.py
|
cuteshop/downloaders/git.py
|
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', source_info['tag']),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
|
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def _checkout(name):
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', name),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
_checkout(source_info['tag'])
elif 'branch' in source_info:
_checkout(source_info['branch'])
|
Add auto branch checkout functionality
|
Add auto branch checkout functionality
|
Python
|
mit
|
uranusjr/cuteshop
|
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
+
+
+ def _checkout(name):
+ with change_working_directory(DOWNLOAD_CONTAINER):
+ subprocess.call(
+ ('git', 'checkout', name),
+ stdout=DEVNULL, stderr=subprocess.STDOUT,
+ )
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
- with change_working_directory(DOWNLOAD_CONTAINER):
- subprocess.call(
- ('git', 'checkout', source_info['tag']),
+ _checkout(source_info['tag'])
- stdout=DEVNULL, stderr=subprocess.STDOUT,
- )
+ elif 'branch' in source_info:
+ _checkout(source_info['branch'])
|
Add auto branch checkout functionality
|
## Code Before:
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', source_info['tag']),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
## Instruction:
Add auto branch checkout functionality
## Code After:
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def _checkout(name):
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', name),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
_checkout(source_info['tag'])
elif 'branch' in source_info:
_checkout(source_info['branch'])
|
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
+
+
+ def _checkout(name):
+ with change_working_directory(DOWNLOAD_CONTAINER):
+ subprocess.call(
+ ('git', 'checkout', name),
+ stdout=DEVNULL, stderr=subprocess.STDOUT,
+ )
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
- with change_working_directory(DOWNLOAD_CONTAINER):
- subprocess.call(
- ('git', 'checkout', source_info['tag']),
? ^^^^^^^^^^^^^^^^^ ^^^ -
+ _checkout(source_info['tag'])
? ^ ^
- stdout=DEVNULL, stderr=subprocess.STDOUT,
- )
+ elif 'branch' in source_info:
+ _checkout(source_info['branch'])
|
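Usage of the refactored function, for reference; the URL and ref names below are made up:

from cuteshop.downloaders.git import download  # module shown above

# Clone only, clone + tag checkout, and clone + branch checkout respectively.
download({'git': 'https://example.com/repo.git'})
download({'git': 'https://example.com/repo.git', 'tag': 'v1.0'})
download({'git': 'https://example.com/repo.git', 'branch': 'develop'})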
e3db38f0de04ab3e1126f3417fcdd99ab7d2e81c
|
flask_ldap_login/check.py
|
flask_ldap_login/check.py
|
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
Use getpass to get password
|
Use getpass to get password
|
Python
|
bsd-2-clause
|
ContinuumIO/flask-ldap-login,ContinuumIO/flask-ldap-login
|
from argparse import ArgumentParser
from pprint import pprint
+ import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
help='Python importable flask application e.g. my.module:app')
+ parser.add_argument('-u', '--username', help='Ldap login with this username')
+ parser.add_argument('-p', '--password', help='Ldap login with this password')
- parser.add_argument('-u', '--username', required=True,
- help='Ldap login with this username')
- parser.add_argument('-p', '--password', required=True,
- help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
+ username = args.username or raw_input('Username: ')
+ password = args.password or getpass.getpass()
+
app.ldap_login_manager.set_raise_errors()
try:
- userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
+ userdata = app.ldap_login_manager.ldap_login(username, password)
- print("Got userdata for %s" % args.username)
+ print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
Use getpass to get password
|
## Code Before:
from argparse import ArgumentParser
from pprint import pprint
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', required=True,
help='Ldap login with this username')
parser.add_argument('-p', '--password', required=True,
help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
print("Got userdata for %s" % args.username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
## Instruction:
Use getpass to get password
## Code After:
from argparse import ArgumentParser
from pprint import pprint
import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
help='Python importable flask application e.g. my.module:app')
parser.add_argument('-u', '--username', help='Ldap login with this username')
parser.add_argument('-p', '--password', help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
username = args.username or raw_input('Username: ')
password = args.password or getpass.getpass()
app.ldap_login_manager.set_raise_errors()
try:
userdata = app.ldap_login_manager.ldap_login(username, password)
print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
from argparse import ArgumentParser
from pprint import pprint
+ import getpass
from werkzeug.utils import import_string
def main():
parser = ArgumentParser(description=__doc__)
parser.add_argument('app_module',
metavar='APP_MODULE',
help='Python importable flask application e.g. my.module:app')
+ parser.add_argument('-u', '--username', help='Ldap login with this username')
+ parser.add_argument('-p', '--password', help='Ldap login with this password')
- parser.add_argument('-u', '--username', required=True,
- help='Ldap login with this username')
- parser.add_argument('-p', '--password', required=True,
- help='Ldap login with this password')
args = parser.parse_args()
if ':' in args.app_module:
import_name, appname = args.app_module.split(':', 1)
else:
import_name, appname = args.app_module, 'app'
module = import_string(import_name)
app = getattr(module, appname)
+ username = args.username or raw_input('Username: ')
+ password = args.password or getpass.getpass()
+
app.ldap_login_manager.set_raise_errors()
try:
- userdata = app.ldap_login_manager.ldap_login(args.username, args.password)
? ----- -----
+ userdata = app.ldap_login_manager.ldap_login(username, password)
- print("Got userdata for %s" % args.username)
? -----
+ print("Got userdata for %s" % username)
pprint(userdata)
except Exception as e:
print("User not found")
pprint(e)
if __name__ == '__main__':
main()
|
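The fallback pattern above — use the flag if given, otherwise prompt — ports directly to Python 3 by swapping raw_input for input. A minimal sketch of just that piece:

import getpass

def prompt_credentials(username=None, password=None):
    # Prompt only for values not supplied; getpass.getpass() suppresses echo
    # so the password never appears on the terminal.
    username = username or input('Username: ')
    password = password or getpass.getpass()
    return username, password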
2989c7074853266fd134a10df4afdcb700499203
|
analyticsdataserver/urls.py
|
analyticsdataserver/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
Update string arg to url() to callable
|
Update string arg to url() to callable
|
Python
|
agpl-3.0
|
Stanford-Online/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
+ from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
- url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token'),
+ url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
Update string arg to url() to callable
|
## Code Before:
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
## Instruction:
Update string arg to url() to callable
## Code After:
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
+ from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
- url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token'),
+ url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
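For background: dotted-string view arguments to url() were deprecated in Django 1.8 and removed in 1.10, so importing the view and passing the callable is the only form that keeps working. A minimal sketch with a hypothetical app:

from django.conf.urls import url

from myapp import views  # hypothetical application

urlpatterns = [
    url(r'^ping/$', views.ping),           # callable: valid on Django 1.10+
    # url(r'^ping/$', 'myapp.views.ping'), # string form: removed in 1.10
]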
f8aa722b9b56ca543f73a40f22fd682a1c71fb4c
|
clowder_server/management/commands/send_alerts.py
|
clowder_server/management/commands/send_alerts.py
|
import datetime
from django.core.management.base import BaseCommand, CommandError
from clowder_server.emailer import send_alert
from clowder_server.models import Alert
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
import datetime
from django.core.management.base import BaseCommand, CommandError
from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
from clowder_server.models import Alert, Ping
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
# delete old pings
for user in ClowderUser.objects.all():
pings = Ping.objects.filter(user=user)[:500]
pings = list(pings) # forces database hit
Ping.objects.exclude(pk__in=pings).delete()
# send alerts
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
Delete old unused pings from users
|
Delete old unused pings from users
|
Python
|
agpl-3.0
|
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server
|
import datetime
from django.core.management.base import BaseCommand, CommandError
+ from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
- from clowder_server.models import Alert
+ from clowder_server.models import Alert, Ping
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
+
+ # delete old pings
+ for user in ClowderUser.objects.all():
+ pings = Ping.objects.filter(user=user)[:500]
+ pings = list(pings) # forces database hit
+ Ping.objects.exclude(pk__in=pings).delete()
+
+ # send alerts
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
Delete old unused pings from users
|
## Code Before:
import datetime
from django.core.management.base import BaseCommand, CommandError
from clowder_server.emailer import send_alert
from clowder_server.models import Alert
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
## Instruction:
Delete old unused pings from users
## Code After:
import datetime
from django.core.management.base import BaseCommand, CommandError
from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
from clowder_server.models import Alert, Ping
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
# delete old pings
for user in ClowderUser.objects.all():
pings = Ping.objects.filter(user=user)[:500]
pings = list(pings) # forces database hit
Ping.objects.exclude(pk__in=pings).delete()
# send alerts
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
import datetime
from django.core.management.base import BaseCommand, CommandError
+ from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
- from clowder_server.models import Alert
+ from clowder_server.models import Alert, Ping
? ++++++
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
+
+ # delete old pings
+ for user in ClowderUser.objects.all():
+ pings = Ping.objects.filter(user=user)[:500]
+ pings = list(pings) # forces database hit
+ Ping.objects.exclude(pk__in=pings).delete()
+
+ # send alerts
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
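The slice-then-exclude idiom in the new handle() is the usual Django answer to "keep only the newest N rows", since .delete() cannot run on a sliced queryset. A standalone sketch of the idiom, assuming the Ping model above is importable; note the extra filter(user=user) before the exclude, which keeps the delete scoped to one user's rows:

from clowder_server.models import Ping

def trim_pings(user, keep=500):
    # Newest `keep` pings for this user; list() forces the query so the ids
    # are fixed before the delete statement runs.
    keep_ids = list(
        Ping.objects.filter(user=user).values_list('pk', flat=True)[:keep]
    )
    Ping.objects.filter(user=user).exclude(pk__in=keep_ids).delete()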
c76734ea034f2a48de0eab995c5db5667086e0c8
|
common/util/log.py
|
common/util/log.py
|
import sublime
def universal_newlines(string):
return string.replace('\r\n', '\n').replace('\r', '\n')
def panel(message, run_async=True):
message = universal_newlines(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_display_panel", {"msg": message})
)
else:
view.run_command("gs_display_panel", {"msg": message})
def panel_append(message, run_async=True):
message = universal_newlines(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_append_panel", {"msg": message})
)
else:
view.run_command("gs_append_panel", {"msg": message})
|
import re
import sublime
ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
def normalize(string):
return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n'))
def panel(message, run_async=True):
message = normalize(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_display_panel", {"msg": message})
)
else:
view.run_command("gs_display_panel", {"msg": message})
def panel_append(message, run_async=True):
message = normalize(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_append_panel", {"msg": message})
)
else:
view.run_command("gs_append_panel", {"msg": message})
|
Remove ANSI escape sequences from panel output
|
Remove ANSI escape sequences from panel output
|
Python
|
mit
|
divmain/GitSavvy,divmain/GitSavvy,divmain/GitSavvy
|
+ import re
import sublime
- def universal_newlines(string):
+ ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
+
+
+ def normalize(string):
- return string.replace('\r\n', '\n').replace('\r', '\n')
+ return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n'))
def panel(message, run_async=True):
- message = universal_newlines(str(message))
+ message = normalize(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_display_panel", {"msg": message})
)
else:
view.run_command("gs_display_panel", {"msg": message})
def panel_append(message, run_async=True):
- message = universal_newlines(str(message))
+ message = normalize(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_append_panel", {"msg": message})
)
else:
view.run_command("gs_append_panel", {"msg": message})
|
Remove ANSI escape sequences from panel output
|
## Code Before:
import sublime
def universal_newlines(string):
return string.replace('\r\n', '\n').replace('\r', '\n')
def panel(message, run_async=True):
message = universal_newlines(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_display_panel", {"msg": message})
)
else:
view.run_command("gs_display_panel", {"msg": message})
def panel_append(message, run_async=True):
message = universal_newlines(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_append_panel", {"msg": message})
)
else:
view.run_command("gs_append_panel", {"msg": message})
## Instruction:
Remove ANSI escape sequences from panel output
## Code After:
import re
import sublime
ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
def normalize(string):
return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n'))
def panel(message, run_async=True):
message = normalize(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_display_panel", {"msg": message})
)
else:
view.run_command("gs_display_panel", {"msg": message})
def panel_append(message, run_async=True):
message = normalize(str(message))
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_append_panel", {"msg": message})
)
else:
view.run_command("gs_append_panel", {"msg": message})
|
+ import re
import sublime
- def universal_newlines(string):
+ ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')
+
+
+ def normalize(string):
- return string.replace('\r\n', '\n').replace('\r', '\n')
+ return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n'))
? +++++++++++++++++++++++ +
def panel(message, run_async=True):
- message = universal_newlines(str(message))
? - ^^^ ^ ^^ ------
+ message = normalize(str(message))
? ^ ^ ^^
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_display_panel", {"msg": message})
)
else:
view.run_command("gs_display_panel", {"msg": message})
def panel_append(message, run_async=True):
- message = universal_newlines(str(message))
? - ^^^ ^ ^^ ------
+ message = normalize(str(message))
? ^ ^ ^^
view = sublime.active_window().active_view()
if run_async:
sublime.set_timeout_async(
lambda: view.run_command("gs_append_panel", {"msg": message})
)
else:
view.run_command("gs_append_panel", {"msg": message})
|
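A quick standalone check of what that regex removes — it matches CSI escape sequences such as colors and cursor moves, which git emits when output is forced to a terminal:

import re

ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')

colored = '\x1b[31merror:\x1b[0m merge failed\r\n'
plain = ANSI_ESCAPE_RE.sub('', colored.replace('\r\n', '\n').replace('\r', '\n'))
assert plain == 'error: merge failed\n'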
649c87174711de93261cd7703e67032281e2e8ee
|
salt/modules/scsi.py
|
salt/modules/scsi.py
|
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
Update formatting to Salt guidelines
|
Update formatting to Salt guidelines
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
+ '''
+ SCSI administration module
+ '''
+
+ import os.path
import logging
+
log = logging.getLogger(__name__)
def lsscsi():
'''
- List scsi devices
+ List SCSI devices
+
CLI Example:
+
.. code-block:: bash
+
- salt '*' scsi.lsscsi
+ salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
+
CLI Example:
+
.. code-block:: bash
+
- salt '*' scsi.rescan_all(0)
+ salt '*' scsi.rescan_all(0)
'''
- if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
+ if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
Update formatting to Salt guidelines
|
## Code Before:
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
## Instruction:
Update formatting to Salt guidelines
## Code After:
'''
SCSI administration module
'''
import os.path
import logging
log = logging.getLogger(__name__)
def lsscsi():
'''
List SCSI devices
CLI Example:
.. code-block:: bash
salt '*' scsi.lsscsi
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
CLI Example:
.. code-block:: bash
salt '*' scsi.rescan_all(0)
'''
if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
+ '''
+ SCSI administration module
+ '''
+
+ import os.path
import logging
+
log = logging.getLogger(__name__)
def lsscsi():
'''
- List scsi devices
? ^^^^
+ List SCSI devices
? ^^^^
+
CLI Example:
+
.. code-block:: bash
+
- salt '*' scsi.lsscsi
+ salt '*' scsi.lsscsi
? ++++
'''
cmd = 'lsscsi'
return __salt__['cmd.run'](cmd).splitlines()
def rescan_all(host):
'''
List scsi devices
+
CLI Example:
+
.. code-block:: bash
+
- salt '*' scsi.rescan_all(0)
+ salt '*' scsi.rescan_all(0)
? ++++
'''
- if os.path.isdir("/sys/class/scsi_host/host{0}").format(host):
? ^ ^
+ if os.path.isdir('/sys/class/scsi_host/host{0}').format(host):
? ^ ^
cmd = 'echo "- - -" > /sys/class/scsi_host/host{0}/scan'.format(host)
else:
return 'Host {0} does not exist'.format(host)
return __salt__['cmd.run'](cmd).splitlines()
|
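One wrinkle the formatting pass left alone: in both versions .format(host) is chained onto the boolean returned by os.path.isdir rather than onto the path string, so the check would raise AttributeError at runtime. A sketch of the presumably intended ordering:

import os.path

def scsi_host_exists(host):
    # Build the path first, then test it.
    return os.path.isdir('/sys/class/scsi_host/host{0}'.format(host))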
80638b2070f578408f00d7a263ccfb27fea5b1d4
|
api/base/language.py
|
api/base/language.py
|
from django.utils.translation import ugettext_lazy as _
BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be '
'available to other contributors. Send delete request to new URL to confirm.')
|
from django.utils.translation import ugettext_lazy as _
BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be '
'available to other contributors. Send delete request to new URL with same '
'query parameters to confirm.')
|
Update delete warning to include instructions that the same query parameters need to be in the request
|
Update delete warning to include instructions that the same query parameters need to be in the request
|
Python
|
apache-2.0
|
cwisecarver/osf.io,emetsger/osf.io,rdhyee/osf.io,caseyrollins/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,sloria/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,mluke93/osf.io,pattisdr/osf.io,acshi/osf.io,GageGaskins/osf.io,leb2dg/osf.io,zachjanicki/osf.io,amyshi188/osf.io,zamattiac/osf.io,RomanZWang/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,billyhunt/osf.io,chennan47/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,Ghalko/osf.io,doublebits/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,mluke93/osf.io,brandonPurvis/osf.io,emetsger/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,hmoco/osf.io,Nesiehr/osf.io,samanehsan/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,leb2dg/osf.io,aaxelb/osf.io,GageGaskins/osf.io,acshi/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,sloria/osf.io,monikagrabowska/osf.io,binoculars/osf.io,kch8qx/osf.io,billyhunt/osf.io,Nesiehr/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,danielneis/osf.io,zamattiac/osf.io,abought/osf.io,cslzchen/osf.io,chrisseto/osf.io,mfraezz/osf.io,jnayak1/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,wearpants/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,abought/osf.io,ticklemepierce/osf.io,doublebits/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,acshi/osf.io,KAsante95/osf.io,felliott/osf.io,emetsger/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,felliott/osf.io,RomanZWang/osf.io,mluo613/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,doublebits/osf.io,mattclark/osf.io,kch8qx/osf.io,amyshi188/osf.io,billyhunt/osf.io,alexschiller/osf.io,acshi/osf.io,KAsante95/osf.io,felliott/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,abought/osf.io,hmoco/osf.io,mfraezz/osf.io,caseyrollins/osf.io,icereval/osf.io,danielneis/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,caneruguz/osf.io,crcresearch/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,hmoco/osf.io,alexschiller/osf.io,chrisseto/osf.io,aaxelb/osf.io,felliott/osf.io,doublebits/osf.io,danielneis/osf.io,pattisdr/osf.io,adlius/osf.io,mluo613/osf.io,SSJohns/osf.io,emetsger/osf.io,mfraezz/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,mluo613/osf.io,kwierman/osf.io,mluo613/osf.io,alexschiller/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,erinspace/osf.io,kwierman/osf.io,doublebits/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,caseyrygt/osf.io,rdhyee/osf.io,SSJohns/osf.io,adlius/osf.io
|
from django.utils.translation import ugettext_lazy as _
BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be '
- 'available to other contributors. Send delete request to new URL to confirm.')
+ 'available to other contributors. Send delete request to new URL with same '
+ 'query parameters to confirm.')
|
Update delete warning to include instructions that the same query parameters need to be in the request
|
## Code Before:
from django.utils.translation import ugettext_lazy as _
BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be '
'available to other contributors. Send delete request to new URL to confirm.')
## Instruction:
Update delete warning to include instructions that the same query parameters need to be in the request
## Code After:
from django.utils.translation import ugettext_lazy as _
BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be '
'available to other contributors. Send delete request to new URL with same '
'query parameters to confirm.')
|
from django.utils.translation import ugettext_lazy as _
BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be '
- 'available to other contributors. Send delete request to new URL to confirm.')
? ^ ^^^^^^ ^ -
+ 'available to other contributors. Send delete request to new URL with same '
? ++ ^ ^^ ^^
+ 'query parameters to confirm.')
|
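ugettext_lazy matters for a module-level constant like this one: the string is created at import time, before any request has activated a language, and the lazy proxy defers the actual translation until the message is rendered. A minimal sketch:

from django.utils.translation import ugettext_lazy as _

# The proxy is translated only when coerced to text (e.g. in a template),
# using whatever language is active at that moment.
CONFIRM_MSG = _('Send the delete request again to confirm.')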
37cb987503f336362d629619f6f39165f4d8e212
|
utils/snippets.py
|
utils/snippets.py
|
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
os.system('xdotool type --clearmodifiers -- "%s"' % str(snippet_map[selected_key]))
|
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
'sign' : 'Best,\nSameer',
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(snippet_map[selected_key]))
|
Update snippet script to work with newlines.
|
Update snippet script to work with newlines.
|
Python
|
mit
|
sam33r/dotfiles,sam33r/dotfiles,sam33r/dotfiles,sam33r/dotfiles
|
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
+ 'sign' : 'Best,\nSameer',
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
- os.system('xdotool type --clearmodifiers -- "%s"' % str(snippet_map[selected_key]))
+ os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(snippet_map[selected_key]))
+
+
|
Update snippet script to work with newlines.
|
## Code Before:
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
os.system('xdotool type --clearmodifiers -- "%s"' % str(snippet_map[selected_key]))
## Instruction:
Update snippet script to work with newlines.
## Code After:
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
'sign' : 'Best,\nSameer',
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(snippet_map[selected_key]))
|
import sys
import os
import datetime
snippet_map = {
'date' : datetime.datetime.now().strftime('%b %d %G %I:%M%p '),
'time' : datetime.datetime.now().strftime('%I:%M%p '),
+ 'sign' : 'Best,\nSameer',
}
keys = '\n'.join(snippet_map.keys())
result = os.popen('printf "%s" | rofi -dmenu ' % keys)
selected_key = result.read().strip()
+
- os.system('xdotool type --clearmodifiers -- "%s"' % str(snippet_map[selected_key]))
? ^^
+ os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % str(snippet_map[selected_key]))
? +++++++++++ ^^^^^^^^^ ++
+
|
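For reference, the typing call in isolation with a multi-line snippet — this assumes xdotool and a POSIX shell are available, and the 0.1 s sleep gives the launcher's modifier keys time to be released before typing begins:

import os

snippet = 'Best,\nSameer'  # contains a real newline
# Command substitution hands the multi-line text to xdotool as one argument.
os.system('sleep 0.1; xdotool type --clearmodifiers "$(printf "%s")"' % snippet)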
23d8942ffeeee72e21330bd8ecc5bfb5e91bbc3b
|
certidude/push.py
|
certidude/push.py
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not config.PUSH_PUBLISH:
# Push server disabled
return
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
Add fallbacks for e-mail handling if outbox is not defined
|
Add fallbacks for e-mail handling if outbox is not defined
|
Python
|
mit
|
laurivosandi/certidude,laurivosandi/certidude,plaes/certidude,laurivosandi/certidude,plaes/certidude,plaes/certidude,laurivosandi/certidude,plaes/certidude
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
+ if not config.PUSH_PUBLISH:
+ # Push server disabled
+ return
+
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
Add fallbacks for e-mail handling if outbox is not defined
|
## Code Before:
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
## Instruction:
Add fallbacks for e-mail handling if outbox is not defined
## Code After:
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
if not config.PUSH_PUBLISH:
# Push server disabled
return
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
import click
import json
import logging
import requests
from datetime import datetime
from certidude import config
def publish(event_type, event_data):
"""
Publish event on push server
"""
+ if not config.PUSH_PUBLISH:
+ # Push server disabled
+ return
+
if not isinstance(event_data, basestring):
from certidude.decorators import MyEncoder
event_data = json.dumps(event_data, cls=MyEncoder)
url = config.PUSH_PUBLISH % config.PUSH_TOKEN
click.echo("Publishing %s event '%s' on %s" % (event_type, event_data, url))
try:
notification = requests.post(
url,
data=event_data,
headers={"X-EventSource-Event": event_type, "User-Agent": "Certidude API"})
if notification.status_code == requests.codes.created:
pass # Sent to client
elif notification.status_code == requests.codes.accepted:
pass # Buffered in nchan
else:
click.echo("Failed to submit event to push server, server responded %d" % (
notification.status_code))
except requests.exceptions.ConnectionError:
click.echo("Failed to submit event to push server, connection error")
class PushLogHandler(logging.Handler):
"""
To be used with Python log handling framework for publishing log entries
"""
def emit(self, record):
from certidude.push import publish
publish("log-entry", dict(
created = datetime.utcfromtimestamp(record.created),
message = record.msg % record.args,
severity = record.levelname.lower()))
|
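Hooking the handler into stdlib logging, for completeness — the logger name is arbitrary, and this assumes certidude is importable and configured:

import logging

from certidude.push import PushLogHandler  # class defined above

logger = logging.getLogger('certidude')
logger.setLevel(logging.INFO)
logger.addHandler(PushLogHandler())
logger.info('signed certificate for %s', 'gw1')  # published as a log-entry event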
76c44154ca1bc2eeb4e24cc820338c36960b1b5c
|
caniuse/test/test_caniuse.py
|
caniuse/test/test_caniuse.py
|
from __future__ import absolute_import
import pytest
from caniuse.main import check
def test_package_name_has_been_used():
assert 'Sorry' in check('requests')
assert 'Sorry' in check('flask')
assert 'Sorry' in check('pip')
def test_package_name_has_not_been_used():
assert 'Congratulation' in check('this_package_name_has_not_been_used')
assert 'Congratulation' in check('you_will_never_use_this_package_name')
assert 'Congratulation' in check('I_suck_and_my_tests_are_order_dependent')
|
from __future__ import absolute_import
import pytest
from click.testing import CliRunner
from caniuse.main import check
from caniuse.cli import cli
class TestAPI():
def test_package_name_has_been_used(self):
assert 'Sorry' in check('requests')
assert 'Sorry' in check('flask')
assert 'Sorry' in check('pip')
def test_package_name_has_not_been_used(self):
assert 'Congratulation' in check('this_package_name_has_not_been_used')
assert 'Congratulation' in \
check('you_will_never_use_this_package_name')
assert 'Congratulation' in \
check('I_suck_and_my_tests_are_order_dependent')
class TestCLI():
def test_package_name_has_been_used(self):
runner = CliRunner()
result_one = runner.invoke(cli, ['requests'])
assert 'Sorry' in result_one.output
result_two = runner.invoke(cli, ['flask'])
assert 'Sorry' in result_two.output
result_three = runner.invoke(cli, ['pip'])
assert 'Sorry' in result_three.output
def test_package_name_has_not_been_used(self):
runner = CliRunner()
result_one = runner.invoke(
cli, ['this_package_name_has_not_been_used'])
assert 'Congratulation' in result_one.output
result_two = runner.invoke(
cli, ['you_will_never_use_this_package_name'])
assert 'Congratulation' in result_two.output
result_three = runner.invoke(
cli, ['I_suck_and_my_tests_are_order_dependent'])
assert 'Congratulation' in result_three.output
|
Add tests for cli.py to improve code coverage
|
Add tests for cli.py to improve code coverage
|
Python
|
mit
|
lord63/caniuse
|
from __future__ import absolute_import
import pytest
+ from click.testing import CliRunner
from caniuse.main import check
+ from caniuse.cli import cli
+ class TestAPI():
- def test_package_name_has_been_used():
+ def test_package_name_has_been_used(self):
- assert 'Sorry' in check('requests')
+ assert 'Sorry' in check('requests')
- assert 'Sorry' in check('flask')
+ assert 'Sorry' in check('flask')
- assert 'Sorry' in check('pip')
+ assert 'Sorry' in check('pip')
+
+ def test_package_name_has_not_been_used(self):
+ assert 'Congratulation' in check('this_package_name_has_not_been_used')
+ assert 'Congratulation' in \
+ check('you_will_never_use_this_package_name')
+ assert 'Congratulation' in \
+ check('I_suck_and_my_tests_are_order_dependent')
+ class TestCLI():
- def test_package_name_has_not_been_used():
+ def test_package_name_has_been_used(self):
- assert 'Congratulation' in check('this_package_name_has_not_been_used')
- assert 'Congratulation' in check('you_will_never_use_this_package_name')
- assert 'Congratulation' in check('I_suck_and_my_tests_are_order_dependent')
+ runner = CliRunner()
+ result_one = runner.invoke(cli, ['requests'])
+ assert 'Sorry' in result_one.output
+ result_two = runner.invoke(cli, ['flask'])
+ assert 'Sorry' in result_two.output
+
+ result_three = runner.invoke(cli, ['pip'])
+ assert 'Sorry' in result_three.output
+
+ def test_package_name_has_not_been_used(self):
+ runner = CliRunner()
+ result_one = runner.invoke(
+ cli, ['this_package_name_has_not_been_used'])
+ assert 'Congratulation' in result_one.output
+
+ result_two = runner.invoke(
+ cli, ['you_will_never_use_this_package_name'])
+ assert 'Congratulation' in result_two.output
+
+ result_three = runner.invoke(
+ cli, ['I_suck_and_my_tests_are_order_dependent'])
+ assert 'Congratulation' in result_three.output
+
|
Add tests for cli.py to improve code coverage
|
## Code Before:
from __future__ import absolute_import
import pytest
from caniuse.main import check
def test_package_name_has_been_used():
assert 'Sorry' in check('requests')
assert 'Sorry' in check('flask')
assert 'Sorry' in check('pip')
def test_package_name_has_not_been_used():
assert 'Congratulation' in check('this_package_name_has_not_been_used')
assert 'Congratulation' in check('you_will_never_use_this_package_name')
assert 'Congratulation' in check('I_suck_and_my_tests_are_order_dependent')
## Instruction:
Add tests for cli.py to improve code coverage
## Code After:
from __future__ import absolute_import
import pytest
from click.testing import CliRunner
from caniuse.main import check
from caniuse.cli import cli
class TestAPI():
def test_package_name_has_been_used(self):
assert 'Sorry' in check('requests')
assert 'Sorry' in check('flask')
assert 'Sorry' in check('pip')
def test_package_name_has_not_been_used(self):
assert 'Congratulation' in check('this_package_name_has_not_been_used')
assert 'Congratulation' in \
check('you_will_never_use_this_package_name')
assert 'Congratulation' in \
check('I_suck_and_my_tests_are_order_dependent')
class TestCLI():
def test_package_name_has_been_used(self):
runner = CliRunner()
result_one = runner.invoke(cli, ['requests'])
assert 'Sorry' in result_one.output
result_two = runner.invoke(cli, ['flask'])
assert 'Sorry' in result_two.output
result_three = runner.invoke(cli, ['pip'])
assert 'Sorry' in result_three.output
def test_package_name_has_not_been_used(self):
runner = CliRunner()
result_one = runner.invoke(
cli, ['this_package_name_has_not_been_used'])
assert 'Congratulation' in result_one.output
result_two = runner.invoke(
cli, ['you_will_never_use_this_package_name'])
assert 'Congratulation' in result_two.output
result_three = runner.invoke(
cli, ['I_suck_and_my_tests_are_order_dependent'])
assert 'Congratulation' in result_three.output
|
from __future__ import absolute_import
import pytest
+ from click.testing import CliRunner
from caniuse.main import check
+ from caniuse.cli import cli
+ class TestAPI():
- def test_package_name_has_been_used():
+ def test_package_name_has_been_used(self):
? ++++ ++++
- assert 'Sorry' in check('requests')
+ assert 'Sorry' in check('requests')
? ++++
- assert 'Sorry' in check('flask')
+ assert 'Sorry' in check('flask')
? ++++
- assert 'Sorry' in check('pip')
+ assert 'Sorry' in check('pip')
? ++++
+
+ def test_package_name_has_not_been_used(self):
+ assert 'Congratulation' in check('this_package_name_has_not_been_used')
+ assert 'Congratulation' in \
+ check('you_will_never_use_this_package_name')
+ assert 'Congratulation' in \
+ check('I_suck_and_my_tests_are_order_dependent')
+ class TestCLI():
+ def test_package_name_has_been_used(self):
+ runner = CliRunner()
+ result_one = runner.invoke(cli, ['requests'])
+ assert 'Sorry' in result_one.output
+
+ result_two = runner.invoke(cli, ['flask'])
+ assert 'Sorry' in result_two.output
+
+ result_three = runner.invoke(cli, ['pip'])
+ assert 'Sorry' in result_three.output
+
- def test_package_name_has_not_been_used():
+ def test_package_name_has_not_been_used(self):
? ++++ ++++
- assert 'Congratulation' in check('this_package_name_has_not_been_used')
- assert 'Congratulation' in check('you_will_never_use_this_package_name')
- assert 'Congratulation' in check('I_suck_and_my_tests_are_order_dependent')
+ runner = CliRunner()
+ result_one = runner.invoke(
+ cli, ['this_package_name_has_not_been_used'])
+ assert 'Congratulation' in result_one.output
+
+ result_two = runner.invoke(
+ cli, ['you_will_never_use_this_package_name'])
+ assert 'Congratulation' in result_two.output
+
+ result_three = runner.invoke(
+ cli, ['I_suck_and_my_tests_are_order_dependent'])
+ assert 'Congratulation' in result_three.output
|
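For context on the testing pattern adopted in this commit: click.testing.CliRunner invokes a command in-process and captures its exit code and output. A self-contained sketch, where the greet command is hypothetical and not part of caniuse:
import click
from click.testing import CliRunner
@click.command()
@click.argument("name")
def greet(name):
    click.echo("Hello, %s!" % name)
def test_greet():
    runner = CliRunner()
    result = runner.invoke(greet, ["world"])
    assert result.exit_code == 0
    assert "Hello, world!" in result.output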
a3213788d0d8591b235359d4b17886ce3f50ab37
|
tests/test_plugin.py
|
tests/test_plugin.py
|
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
with open('./datajoint.pub', "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
from os import path
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
with open(path.join(path.abspath(
path.dirname(__file__)), '..', 'datajoint.pub'), "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
Make pubkey test more portable.
|
Make pubkey test more portable.
|
Python
|
lgpl-2.1
|
datajoint/datajoint-python,dimitri-yatsenko/datajoint-python
|
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
+ from os import path
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
- with open('./datajoint.pub', "r") as f:
+ with open(path.join(path.abspath(
+ path.dirname(__file__)), '..', 'datajoint.pub'), "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
Make pubkey test more portable.
|
## Code Before:
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
with open('./datajoint.pub', "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
## Instruction:
Make pubkey test more portable.
## Code After:
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
from os import path
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
with open(path.join(path.abspath(
path.dirname(__file__)), '..', 'datajoint.pub'), "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
+ from os import path
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
- with open('./datajoint.pub', "r") as f:
+ with open(path.join(path.abspath(
+ path.dirname(__file__)), '..', 'datajoint.pub'), "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
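The portability idiom at the heart of this commit, in isolation: resolve the file relative to the test module instead of the process working directory, so the test passes no matter where pytest is launched from.
from os import path
# __file__ is this test module; '..' walks up to the repository root
# where datajoint.pub lives, independent of the current directory.
pubkey_path = path.join(path.abspath(path.dirname(__file__)), '..', 'datajoint.pub')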
05ec1e93e04b829b8a71f6837409de1b5c8ead5d
|
bndl/compute/tests/__init__.py
|
bndl/compute/tests/__init__.py
|
import unittest
from bndl.compute.run import create_ctx
from bndl.util.conf import Config
class ComputeTest(unittest.TestCase):
worker_count = 3
@classmethod
def setUpClass(cls):
config = Config()
config['bndl.compute.worker_count'] = cls.worker_count
config['bndl.net.listen_addresses'] = 'tcp://127.0.0.11:5000'
cls.ctx = create_ctx(config, daemon=True)
cls.ctx.await_workers(cls.worker_count)
@classmethod
def tearDownClass(cls):
cls.ctx.stop()
class DatasetTest(ComputeTest):
pass
|
import sys
import unittest
from bndl.compute.run import create_ctx
from bndl.util.conf import Config
class ComputeTest(unittest.TestCase):
worker_count = 3
@classmethod
def setUpClass(cls):
# Increase switching interval to lure out race conditions a bit ...
cls._old_switchinterval = sys.getswitchinterval()
sys.setswitchinterval(1e-6)
config = Config()
config['bndl.compute.worker_count'] = cls.worker_count
config['bndl.net.listen_addresses'] = 'tcp://127.0.0.11:5000'
cls.ctx = create_ctx(config, daemon=True)
cls.ctx.await_workers(cls.worker_count)
@classmethod
def tearDownClass(cls):
cls.ctx.stop()
sys.setswitchinterval(cls._old_switchinterval)
class DatasetTest(ComputeTest):
pass
|
Increase switching interval to lure out race conditions a bit ...
|
Increase switching interval to lure out race conditions a bit ...
|
Python
|
apache-2.0
|
bndl/bndl,bndl/bndl
|
+ import sys
import unittest
from bndl.compute.run import create_ctx
from bndl.util.conf import Config
class ComputeTest(unittest.TestCase):
worker_count = 3
@classmethod
def setUpClass(cls):
+ # Increase switching interval to lure out race conditions a bit ...
+ cls._old_switchinterval = sys.getswitchinterval()
+ sys.setswitchinterval(1e-6)
+
config = Config()
config['bndl.compute.worker_count'] = cls.worker_count
config['bndl.net.listen_addresses'] = 'tcp://127.0.0.11:5000'
cls.ctx = create_ctx(config, daemon=True)
cls.ctx.await_workers(cls.worker_count)
@classmethod
def tearDownClass(cls):
cls.ctx.stop()
+ sys.setswitchinterval(cls._old_switchinterval)
class DatasetTest(ComputeTest):
pass
|
Increase switching interval to lure out race conditions a bit ...
|
## Code Before:
import unittest
from bndl.compute.run import create_ctx
from bndl.util.conf import Config
class ComputeTest(unittest.TestCase):
worker_count = 3
@classmethod
def setUpClass(cls):
config = Config()
config['bndl.compute.worker_count'] = cls.worker_count
config['bndl.net.listen_addresses'] = 'tcp://127.0.0.11:5000'
cls.ctx = create_ctx(config, daemon=True)
cls.ctx.await_workers(cls.worker_count)
@classmethod
def tearDownClass(cls):
cls.ctx.stop()
class DatasetTest(ComputeTest):
pass
## Instruction:
Increase switching interval to lure out race conditions a bit ...
## Code After:
import sys
import unittest
from bndl.compute.run import create_ctx
from bndl.util.conf import Config
class ComputeTest(unittest.TestCase):
worker_count = 3
@classmethod
def setUpClass(cls):
# Increase switching interval to lure out race conditions a bit ...
cls._old_switchinterval = sys.getswitchinterval()
sys.setswitchinterval(1e-6)
config = Config()
config['bndl.compute.worker_count'] = cls.worker_count
config['bndl.net.listen_addresses'] = 'tcp://127.0.0.11:5000'
cls.ctx = create_ctx(config, daemon=True)
cls.ctx.await_workers(cls.worker_count)
@classmethod
def tearDownClass(cls):
cls.ctx.stop()
sys.setswitchinterval(cls._old_switchinterval)
class DatasetTest(ComputeTest):
pass
|
+ import sys
import unittest
from bndl.compute.run import create_ctx
from bndl.util.conf import Config
class ComputeTest(unittest.TestCase):
worker_count = 3
@classmethod
def setUpClass(cls):
+ # Increase switching interval to lure out race conditions a bit ...
+ cls._old_switchinterval = sys.getswitchinterval()
+ sys.setswitchinterval(1e-6)
+
config = Config()
config['bndl.compute.worker_count'] = cls.worker_count
config['bndl.net.listen_addresses'] = 'tcp://127.0.0.11:5000'
cls.ctx = create_ctx(config, daemon=True)
cls.ctx.await_workers(cls.worker_count)
@classmethod
def tearDownClass(cls):
cls.ctx.stop()
+ sys.setswitchinterval(cls._old_switchinterval)
class DatasetTest(ComputeTest):
pass
|
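Worth noting: despite the subject saying "increase switching interval", the code lowers the interval to 1e-6 seconds, so the interpreter switches threads far more often and latent race conditions are more likely to surface. The save/restore pattern in isolation, with run_racy_code as a hypothetical stand-in for the threaded code under test:
import sys
def run_racy_code():
    pass  # hypothetical stand-in for multithreaded test code
old_interval = sys.getswitchinterval()
sys.setswitchinterval(1e-6)  # switch threads roughly every microsecond
try:
    run_racy_code()
finally:
    sys.setswitchinterval(old_interval)  # always restore the default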
787c8a1f1f000b75095fab5cc6b3e8e5d4ef60d8
|
usingnamespace/models/Domain.py
|
usingnamespace/models/Domain.py
|
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
Change from unicode to string for domain
|
Change from unicode to string for domain
DNS entries won't contain unicode characters, and by default are ASCII.
|
Python
|
isc
|
usingnamespace/usingnamespace
|
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
+ Integer,
+ PrimaryKeyConstraint,
+ String,
Table,
- Integer,
Unicode,
- PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
- Column('domain', Unicode(256), index=True, unique=True),
+ Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
Change from unicode to string for domain
|
## Code Before:
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
## Instruction:
Change from unicode to string for domain
## Code After:
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
+ Integer,
+ PrimaryKeyConstraint,
+ String,
Table,
- Integer,
Unicode,
- PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
- Column('domain', Unicode(256), index=True, unique=True),
? ^ ^^^^^
+ Column('domain', String(256), index=True, unique=True),
? ^^^^ ^
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
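Background on the type swap: SQLAlchemy's Unicode type flags text that may need encoding handling on the driver side, while String suits ASCII-safe data such as DNS names. A minimal illustrative table, hypothetical and not from usingnamespace:
from sqlalchemy import Column, Integer, MetaData, String, Table, Unicode
metadata = MetaData()
example = Table('example', metadata,
    Column('id', Integer, primary_key=True),
    Column('hostname', String(256)),  # DNS names are ASCII by default
    Column('title', Unicode(256)),    # free-form, user-facing text
)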
1ed49dae9d88e1e277a0eef879dec53ed925417a
|
highlander/exceptions.py
|
highlander/exceptions.py
|
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
|
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
class InvalidPidDirectoryError(Exception):
""" An exception when an invalid PID directory is detected."""
|
Add a new exception since we are making a directory now.
|
Add a new exception since we are making a directory now.
|
Python
|
mit
|
chriscannon/highlander
|
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
+ class InvalidPidDirectoryError(Exception):
+ """ An exception when an invalid PID directory is detected."""
+
|
Add a new exception since we are making a directory now.
|
## Code Before:
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
## Instruction:
Add a new exception since we are making a directory now.
## Code After:
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
class InvalidPidDirectoryError(Exception):
""" An exception when an invalid PID directory is detected."""
|
class InvalidPidFileError(Exception):
""" An exception when an invalid PID file is read."""
class PidFileExistsError(Exception):
""" An exception when a PID file already exists."""
+
+ class InvalidPidDirectoryError(Exception):
+ """ An exception when an invalid PID directory is detected."""
|
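A sketch of where the new exception could plausibly be raised; the ensure_pid_directory helper is an assumption for illustration, not highlander's actual code:
import os
class InvalidPidDirectoryError(Exception):
    """ An exception when an invalid PID directory is detected."""
def ensure_pid_directory(directory):
    # Hypothetical helper: fail fast if the path exists but is not a directory.
    if os.path.exists(directory) and not os.path.isdir(directory):
        raise InvalidPidDirectoryError(
            '%s exists but is not a directory' % directory)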
d86fe37bb29cc8c09c4659de579d4c370a59c40b
|
scripts/container_log_collector.py
|
scripts/container_log_collector.py
|
import os
from pathlib import Path
from pathlib import PosixPath
import subprocess
# Make a log directory
log_path = Path("logs")
log_path.mkdir(exist_ok=True)
# Get the github job name and create a directory for it
job_name = os.getenv("GITHUB_JOB")
job_path: PosixPath = log_path / job_name
job_path.mkdir(exist_ok=True)
# Get all the containers running (per job)
containers = (
subprocess.check_output("docker ps --format '{{.Names}}'", shell=True)
.decode("utf-8")
.split()
)
# Loop through the container ids and create a log file for each in the job directory
for container in containers:
# Get the container name
container_name = container.replace("'", "")
# Get the container logs
container_logs = subprocess.check_output(
"docker logs " + container_name, shell=True, stderr=subprocess.STDOUT
).decode("utf-8")
path = job_path / container_name
path.write_text(container_logs)
stored_files = list(job_path.iterdir())
for file in stored_files:
print(file)
print("============Log export completed for job: ", job_name)
|
import os
from pathlib import Path
from pathlib import PosixPath
import subprocess
# Make a log directory
log_path = Path("logs")
log_path.mkdir(exist_ok=True)
# Get the github job name and create a directory for it
job_name = os.getenv("GITHUB_JOB")
job_path: PosixPath = log_path / job_name
job_path.mkdir(exist_ok=True)
# Get all the containers running (per job)
containers = (
subprocess.check_output("docker ps --format '{{.Names}}'", shell=True)
.decode("utf-8")
.split()
)
# Loop through the container ids and create a log file for each in the job directory
for container in containers:
# Get the container name
container_name = container.replace("'", "")
# Get the container logs
container_logs = subprocess.check_output(
"docker logs " + container_name, shell=True, stderr=subprocess.STDOUT
).decode("utf-8")
path = job_path / container_name
path.write_text(container_logs, encoding="utf-8")
stored_files = list(job_path.iterdir())
for file in stored_files:
print(file)
print("============Log export completed for job: ", job_name)
|
Set docker log encoding to utf-8
|
Set docker log encoding to utf-8
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
import os
from pathlib import Path
from pathlib import PosixPath
import subprocess
# Make a log directory
log_path = Path("logs")
log_path.mkdir(exist_ok=True)
# Get the github job name and create a directory for it
job_name = os.getenv("GITHUB_JOB")
job_path: PosixPath = log_path / job_name
job_path.mkdir(exist_ok=True)
# Get all the containers running (per job)
containers = (
subprocess.check_output("docker ps --format '{{.Names}}'", shell=True)
.decode("utf-8")
.split()
)
# Loop through the container ids and create a log file for each in the job directory
for container in containers:
# Get the container name
container_name = container.replace("'", "")
# Get the container logs
container_logs = subprocess.check_output(
"docker logs " + container_name, shell=True, stderr=subprocess.STDOUT
).decode("utf-8")
path = job_path / container_name
- path.write_text(container_logs)
+ path.write_text(container_logs, encoding="utf-8")
stored_files = list(job_path.iterdir())
for file in stored_files:
print(file)
print("============Log export completed for job: ", job_name)
|
Set docker log encoding to utf-8
|
## Code Before:
import os
from pathlib import Path
from pathlib import PosixPath
import subprocess
# Make a log directory
log_path = Path("logs")
log_path.mkdir(exist_ok=True)
# Get the github job name and create a directory for it
job_name = os.getenv("GITHUB_JOB")
job_path: PosixPath = log_path / job_name
job_path.mkdir(exist_ok=True)
# Get all the containers running (per job)
containers = (
subprocess.check_output("docker ps --format '{{.Names}}'", shell=True)
.decode("utf-8")
.split()
)
# Loop through the container ids and create a log file for each in the job directory
for container in containers:
# Get the container name
container_name = container.replace("'", "")
# Get the container logs
container_logs = subprocess.check_output(
"docker logs " + container_name, shell=True, stderr=subprocess.STDOUT
).decode("utf-8")
path = job_path / container_name
path.write_text(container_logs)
stored_files = list(job_path.iterdir())
for file in stored_files:
print(file)
print("============Log export completed for job: ", job_name)
## Instruction:
Set docker log encoding to utf-8
## Code After:
import os
from pathlib import Path
from pathlib import PosixPath
import subprocess
# Make a log directory
log_path = Path("logs")
log_path.mkdir(exist_ok=True)
# Get the github job name and create a directory for it
job_name = os.getenv("GITHUB_JOB")
job_path: PosixPath = log_path / job_name
job_path.mkdir(exist_ok=True)
# Get all the containers running (per job)
containers = (
subprocess.check_output("docker ps --format '{{.Names}}'", shell=True)
.decode("utf-8")
.split()
)
# Loop through the container ids and create a log file for each in the job directory
for container in containers:
# Get the container name
container_name = container.replace("'", "")
# Get the container logs
container_logs = subprocess.check_output(
"docker logs " + container_name, shell=True, stderr=subprocess.STDOUT
).decode("utf-8")
path = job_path / container_name
path.write_text(container_logs, encoding="utf-8")
stored_files = list(job_path.iterdir())
for file in stored_files:
print(file)
print("============Log export completed for job: ", job_name)
|
import os
from pathlib import Path
from pathlib import PosixPath
import subprocess
# Make a log directory
log_path = Path("logs")
log_path.mkdir(exist_ok=True)
# Get the github job name and create a directory for it
job_name = os.getenv("GITHUB_JOB")
job_path: PosixPath = log_path / job_name
job_path.mkdir(exist_ok=True)
# Get all the containers running (per job)
containers = (
subprocess.check_output("docker ps --format '{{.Names}}'", shell=True)
.decode("utf-8")
.split()
)
# Loop through the container ids and create a log file for each in the job directory
for container in containers:
# Get the container name
container_name = container.replace("'", "")
# Get the container logs
container_logs = subprocess.check_output(
"docker logs " + container_name, shell=True, stderr=subprocess.STDOUT
).decode("utf-8")
path = job_path / container_name
- path.write_text(container_logs)
+ path.write_text(container_logs, encoding="utf-8")
? ++++++++++++++++++
stored_files = list(job_path.iterdir())
for file in stored_files:
print(file)
print("============Log export completed for job: ", job_name)
|
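Why the encoding argument matters: without it, Path.write_text falls back to the platform default encoding (for example cp1252 on Windows), and container logs containing non-ASCII bytes can raise UnicodeEncodeError. A minimal demonstration:
from pathlib import Path
log_line = 'naïve café: container output with non-ASCII characters'
# An explicit encoding keeps the write portable across platforms.
Path('example.log').write_text(log_line, encoding='utf-8')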
7f2e91064eabc020cbe660639713278fc187a034
|
tests/test_result.py
|
tests/test_result.py
|
import pytest
from serfclient import result
class TestSerfResult(object):
def test_initialises_to_none(self):
r = result.SerfResult()
assert r.head is None
assert r.body is None
def test_provides_a_pretty_printed_form_for_repl_use(self):
r = result.SerfResult(head={"a": 1}, body=('foo', 'bar'))
assert str(r) == \
"SerfResult<head={'a': 1},body=('foo', 'bar')>"
def test_can_convert_to_list(self):
r = result.SerfResult(head=1, body=2)
assert sorted(list(r)) == [1, 2]
def test_can_convert_to_tuple(self):
r = result.SerfResult(head=1, body=2)
assert sorted(tuple(r)) == [1, 2]
|
from serfclient import result
class TestSerfResult(object):
def test_initialises_to_none(self):
r = result.SerfResult()
assert r.head is None
assert r.body is None
def test_provides_a_pretty_printed_form_for_repl_use(self):
r = result.SerfResult(head={"a": 1}, body=('foo', 'bar'))
assert str(r) == \
"SerfResult<head={'a': 1},body=('foo', 'bar')>"
def test_can_convert_to_list(self):
r = result.SerfResult(head=1, body=2)
assert sorted(list(r)) == [1, 2]
def test_can_convert_to_tuple(self):
r = result.SerfResult(head=1, body=2)
assert sorted(tuple(r)) == [1, 2]
|
Remove unused import of pytest
|
Remove unused import of pytest
|
Python
|
mit
|
charleswhchan/serfclient-py,KushalP/serfclient-py
|
- import pytest
-
from serfclient import result
class TestSerfResult(object):
def test_initialises_to_none(self):
r = result.SerfResult()
assert r.head is None
assert r.body is None
def test_provides_a_pretty_printed_form_for_repl_use(self):
r = result.SerfResult(head={"a": 1}, body=('foo', 'bar'))
assert str(r) == \
"SerfResult<head={'a': 1},body=('foo', 'bar')>"
def test_can_convert_to_list(self):
r = result.SerfResult(head=1, body=2)
assert sorted(list(r)) == [1, 2]
def test_can_convert_to_tuple(self):
r = result.SerfResult(head=1, body=2)
assert sorted(tuple(r)) == [1, 2]
|
Remove unused import of pytest
|
## Code Before:
import pytest
from serfclient import result
class TestSerfResult(object):
def test_initialises_to_none(self):
r = result.SerfResult()
assert r.head is None
assert r.body is None
def test_provides_a_pretty_printed_form_for_repl_use(self):
r = result.SerfResult(head={"a": 1}, body=('foo', 'bar'))
assert str(r) == \
"SerfResult<head={'a': 1},body=('foo', 'bar')>"
def test_can_convert_to_list(self):
r = result.SerfResult(head=1, body=2)
assert sorted(list(r)) == [1, 2]
def test_can_convert_to_tuple(self):
r = result.SerfResult(head=1, body=2)
assert sorted(tuple(r)) == [1, 2]
## Instruction:
Remove unused import of pytest
## Code After:
from serfclient import result
class TestSerfResult(object):
def test_initialises_to_none(self):
r = result.SerfResult()
assert r.head is None
assert r.body is None
def test_provides_a_pretty_printed_form_for_repl_use(self):
r = result.SerfResult(head={"a": 1}, body=('foo', 'bar'))
assert str(r) == \
"SerfResult<head={'a': 1},body=('foo', 'bar')>"
def test_can_convert_to_list(self):
r = result.SerfResult(head=1, body=2)
assert sorted(list(r)) == [1, 2]
def test_can_convert_to_tuple(self):
r = result.SerfResult(head=1, body=2)
assert sorted(tuple(r)) == [1, 2]
|
- import pytest
-
from serfclient import result
class TestSerfResult(object):
def test_initialises_to_none(self):
r = result.SerfResult()
assert r.head is None
assert r.body is None
def test_provides_a_pretty_printed_form_for_repl_use(self):
r = result.SerfResult(head={"a": 1}, body=('foo', 'bar'))
assert str(r) == \
"SerfResult<head={'a': 1},body=('foo', 'bar')>"
def test_can_convert_to_list(self):
r = result.SerfResult(head=1, body=2)
assert sorted(list(r)) == [1, 2]
def test_can_convert_to_tuple(self):
r = result.SerfResult(head=1, body=2)
assert sorted(tuple(r)) == [1, 2]
|
53ba55615fbd02e83212aecaa0c37d1887adfc73
|
tests/test_tracer.py
|
tests/test_tracer.py
|
import unittest
import sys
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
def check(body):
with self.assertRaises(ValueError):
exec("""
from birdseye.tracer import TreeTracerBase
@TreeTracerBase()
async def f(): """ + body)
check('pass')
if sys.version_info >= (3, 6):
check('yield 1')
|
import sys
import unittest
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
from birdseye.tracer import TreeTracerBase
tracer = TreeTracerBase()
with self.assertRaises(ValueError):
exec("""
@tracer
async def f(): pass""")
if sys.version_info >= (3, 6):
with self.assertRaises(ValueError):
exec("""
@tracer
async def f(): yield 1""")
|
Fix inner exec syntax error in python 2.7
|
Fix inner exec syntax error in python 2.7
|
Python
|
mit
|
alexmojaki/birdseye,alexmojaki/birdseye,alexmojaki/birdseye,alexmojaki/birdseye
|
+ import sys
import unittest
-
- import sys
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
- def check(body):
+ from birdseye.tracer import TreeTracerBase
+ tracer = TreeTracerBase()
+ with self.assertRaises(ValueError):
+ exec("""
+ @tracer
+ async def f(): pass""")
+
+ if sys.version_info >= (3, 6):
with self.assertRaises(ValueError):
exec("""
+ @tracer
+ async def f(): yield 1""")
- from birdseye.tracer import TreeTracerBase
- @TreeTracerBase()
- async def f(): """ + body)
- check('pass')
-
- if sys.version_info >= (3, 6):
- check('yield 1')
-
|
Fix inner exec syntax error in python 2.7
|
## Code Before:
import unittest
import sys
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
def check(body):
with self.assertRaises(ValueError):
exec("""
from birdseye.tracer import TreeTracerBase
@TreeTracerBase()
async def f(): """ + body)
check('pass')
if sys.version_info >= (3, 6):
check('yield 1')
## Instruction:
Fix inner exec syntax error in python 2.7
## Code After:
import sys
import unittest
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
from birdseye.tracer import TreeTracerBase
tracer = TreeTracerBase()
with self.assertRaises(ValueError):
exec("""
@tracer
async def f(): pass""")
if sys.version_info >= (3, 6):
with self.assertRaises(ValueError):
exec("""
@tracer
async def f(): yield 1""")
|
+ import sys
import unittest
-
- import sys
from tests.utils import requires_python_version
class TestTreeTrace(unittest.TestCase):
maxDiff = None
@requires_python_version(3.5)
def test_async_forbidden(self):
- def check(body):
+ from birdseye.tracer import TreeTracerBase
+ tracer = TreeTracerBase()
+ with self.assertRaises(ValueError):
+ exec("""
+ @tracer
+ async def f(): pass""")
+
+ if sys.version_info >= (3, 6):
with self.assertRaises(ValueError):
exec("""
+ @tracer
+ async def f(): yield 1""")
- from birdseye.tracer import TreeTracerBase
- @TreeTracerBase()
- async def f(): """ + body)
-
- check('pass')
-
- if sys.version_info >= (3, 6):
- check('yield 1')
|
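The Python 2.7 limitation behind this refactor, shown in isolation: CPython 2 rejects an unqualified exec statement inside a nested function at compile time, so the module containing the check helper could not even be imported. Inlining the exec calls into the test method removes the nesting. Sketch (compiles on Python 3, fails to compile on 2.7):
def outer():
    def inner(code):
        # Python 2.7 refuses to compile this module because an unqualified
        # exec is not allowed inside a nested function.
        exec(code)
    inner('x = 1')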
aac598d64fc0fa50cc068fc50173068e5d89b3fd
|
segpy/ext/numpyext.py
|
segpy/ext/numpyext.py
|
"""Optional interoperability with Numpy."""
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
'l': numpy.dtype('i4'),
'h': numpy.dtype('i2'),
'f': numpy.dtype('f4'),
'b': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
|
"""Optional interoperability with Numpy."""
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
'int32': numpy.dtype('i4'),
'int16': numpy.dtype('i2'),
'float32': numpy.dtype('f4'),
'int8': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
|
Update numpy dtypes extension for correct type codes.
|
Update numpy dtypes extension for correct type codes.
|
Python
|
agpl-3.0
|
hohogpb/segpy,stevejpurves/segpy,abingham/segpy,asbjorn/segpy,kjellkongsvik/segpy,Kramer477/segpy,kwinkunks/segpy
|
"""Optional interoperability with Numpy."""
import numpy
- NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
+ NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
- 'l': numpy.dtype('i4'),
+ 'int32': numpy.dtype('i4'),
- 'h': numpy.dtype('i2'),
+ 'int16': numpy.dtype('i2'),
- 'f': numpy.dtype('f4'),
+ 'float32': numpy.dtype('f4'),
- 'b': numpy.dtype('i1')}
+ 'int8': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
|
Update numpy dtypes extension for correct type codes.
|
## Code Before:
"""Optional interoperability with Numpy."""
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
'l': numpy.dtype('i4'),
'h': numpy.dtype('i2'),
'f': numpy.dtype('f4'),
'b': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
## Instruction:
Update numpy dtypes extension for correct type codes.
## Code After:
"""Optional interoperability with Numpy."""
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
'int32': numpy.dtype('i4'),
'int16': numpy.dtype('i2'),
'float32': numpy.dtype('f4'),
'int8': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
|
"""Optional interoperability with Numpy."""
import numpy
- NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
+ NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
? ++++
- 'l': numpy.dtype('i4'),
? ^
+ 'int32': numpy.dtype('i4'),
? ^^^^^
- 'h': numpy.dtype('i2'),
? ^
+ 'int16': numpy.dtype('i2'),
? ^^^^^
- 'f': numpy.dtype('f4'),
? --
+ 'float32': numpy.dtype('f4'),
? ++++++
- 'b': numpy.dtype('i1')}
? ^
+ 'int8': numpy.dtype('i1')}
? ^^^^ +
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
|
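A quick, runnable sanity check of the corrected mapping: each new key spells out the sample width, and the itemsize of the corresponding numpy dtype agrees.
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
                'int32': numpy.dtype('i4'),
                'int16': numpy.dtype('i2'),
                'float32': numpy.dtype('f4'),
                'int8': numpy.dtype('i1')}
assert NUMPY_DTYPES['int16'].itemsize == 2
assert NUMPY_DTYPES['float32'].itemsize == 4
assert NUMPY_DTYPES['int8'].itemsize == 1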
e9964a0f96777c5aae83349ccde3d14fbd04353b
|
contrib/generate-gresource-xml.py
|
contrib/generate-gresource-xml.py
|
import sys
import os
import xml.etree.ElementTree as ET
if len(sys.argv) < 2:
print("not enough arguments")
sys.exit(1)
root = ET.Element("gresources")
n_gresource = ET.SubElement(root, "gresource", {"prefix": "/org/freedesktop/fwupd"})
for fn in sorted(sys.argv[2:]):
n_file = ET.SubElement(n_gresource, "file", {"compressed": "true"})
n_file.text = fn
if fn.endswith(".xml"):
n_file.set("preprocess", "xml-stripblanks")
n_file.set("alias", os.path.basename(fn))
with open(sys.argv[1], "wb") as f:
f.write(ET.tostring(root, "utf-8", xml_declaration=True))
sys.exit(0)
|
import sys
import os
import xml.etree.ElementTree as ET
if len(sys.argv) < 2:
print("not enough arguments")
sys.exit(1)
root = ET.Element("gresources")
n_gresource = ET.SubElement(root, "gresource", {"prefix": "/org/freedesktop/fwupd"})
for fn in sorted(sys.argv[2:]):
n_file = ET.SubElement(n_gresource, "file", {"compressed": "true"})
n_file.text = fn
if fn.endswith(".xml"):
n_file.set("preprocess", "xml-stripblanks")
n_file.set("alias", os.path.basename(fn))
with open(sys.argv[1], "wb") as f:
try:
f.write(ET.tostring(root, "utf-8", xml_declaration=True))
except TypeError:
f.write(ET.tostring(root, "utf-8"))
sys.exit(0)
|
Fix compile when using python 3.7 or older
|
trivial: Fix compile when using python 3.7 or older
Signed-off-by: Richard Hughes <[email protected]>
|
Python
|
lgpl-2.1
|
fwupd/fwupd,fwupd/fwupd,fwupd/fwupd,fwupd/fwupd
|
import sys
import os
import xml.etree.ElementTree as ET
if len(sys.argv) < 2:
print("not enough arguments")
sys.exit(1)
root = ET.Element("gresources")
n_gresource = ET.SubElement(root, "gresource", {"prefix": "/org/freedesktop/fwupd"})
for fn in sorted(sys.argv[2:]):
n_file = ET.SubElement(n_gresource, "file", {"compressed": "true"})
n_file.text = fn
if fn.endswith(".xml"):
n_file.set("preprocess", "xml-stripblanks")
n_file.set("alias", os.path.basename(fn))
with open(sys.argv[1], "wb") as f:
+ try:
- f.write(ET.tostring(root, "utf-8", xml_declaration=True))
+ f.write(ET.tostring(root, "utf-8", xml_declaration=True))
+ except TypeError:
+ f.write(ET.tostring(root, "utf-8"))
sys.exit(0)
|
Fix compile when using python 3.7 or older
|
## Code Before:
import sys
import os
import xml.etree.ElementTree as ET
if len(sys.argv) < 2:
print("not enough arguments")
sys.exit(1)
root = ET.Element("gresources")
n_gresource = ET.SubElement(root, "gresource", {"prefix": "/org/freedesktop/fwupd"})
for fn in sorted(sys.argv[2:]):
n_file = ET.SubElement(n_gresource, "file", {"compressed": "true"})
n_file.text = fn
if fn.endswith(".xml"):
n_file.set("preprocess", "xml-stripblanks")
n_file.set("alias", os.path.basename(fn))
with open(sys.argv[1], "wb") as f:
f.write(ET.tostring(root, "utf-8", xml_declaration=True))
sys.exit(0)
## Instruction:
Fix compile when using python 3.7 or older
## Code After:
import sys
import os
import xml.etree.ElementTree as ET
if len(sys.argv) < 2:
print("not enough arguments")
sys.exit(1)
root = ET.Element("gresources")
n_gresource = ET.SubElement(root, "gresource", {"prefix": "/org/freedesktop/fwupd"})
for fn in sorted(sys.argv[2:]):
n_file = ET.SubElement(n_gresource, "file", {"compressed": "true"})
n_file.text = fn
if fn.endswith(".xml"):
n_file.set("preprocess", "xml-stripblanks")
n_file.set("alias", os.path.basename(fn))
with open(sys.argv[1], "wb") as f:
try:
f.write(ET.tostring(root, "utf-8", xml_declaration=True))
except TypeError:
f.write(ET.tostring(root, "utf-8"))
sys.exit(0)
|
import sys
import os
import xml.etree.ElementTree as ET
if len(sys.argv) < 2:
print("not enough arguments")
sys.exit(1)
root = ET.Element("gresources")
n_gresource = ET.SubElement(root, "gresource", {"prefix": "/org/freedesktop/fwupd"})
for fn in sorted(sys.argv[2:]):
n_file = ET.SubElement(n_gresource, "file", {"compressed": "true"})
n_file.text = fn
if fn.endswith(".xml"):
n_file.set("preprocess", "xml-stripblanks")
n_file.set("alias", os.path.basename(fn))
with open(sys.argv[1], "wb") as f:
+ try:
- f.write(ET.tostring(root, "utf-8", xml_declaration=True))
+ f.write(ET.tostring(root, "utf-8", xml_declaration=True))
? ++++
+ except TypeError:
+ f.write(ET.tostring(root, "utf-8"))
sys.exit(0)
|
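The underlying incompatibility: ElementTree.tostring only gained the xml_declaration keyword in Python 3.8, so older interpreters raise TypeError for the unknown argument, and the commit degrades gracefully instead of crashing. In isolation:
import xml.etree.ElementTree as ET
root = ET.Element('gresources')
try:
    # Python >= 3.8 accepts the keyword and forces a declaration.
    data = ET.tostring(root, 'utf-8', xml_declaration=True)
except TypeError:
    # Python <= 3.7: serialise without forcing the declaration.
    data = ET.tostring(root, 'utf-8')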
1ef1d7a973ce44943fc59315d1f962ed59f06e33
|
seacucumber/backend.py
|
seacucumber/backend.py
|
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
message.message().as_string(),
)
num_sent += 1
return num_sent
|
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
message.message().as_string().decode('utf8'),
)
num_sent += 1
return num_sent
|
Patch to send mails with UTF8 encoding
|
Patch to send mails with UTF8 encoding
Just a temp fix
|
Python
|
mit
|
makielab/sea-cucumber,duointeractive/sea-cucumber
|
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
+
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
-
+
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
- message.message().as_string(),
+ message.message().as_string().decode('utf8'),
)
num_sent += 1
return num_sent
-
|
Patch to send mails with UTF8 encoding
|
## Code Before:
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
message.message().as_string(),
)
num_sent += 1
return num_sent
## Instruction:
Patch to send mails with UTF8 encoding
## Code After:
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
message.message().as_string().decode('utf8'),
)
num_sent += 1
return num_sent
|
from django.core.mail.backends.base import BaseEmailBackend
from seacucumber.tasks import SendEmailTask
+
class SESBackend(BaseEmailBackend):
"""
A Django Email backend that uses Amazon's Simple Email Service.
"""
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of
email messages sent.
-
+
:param EmailMessage email_messages: A list of Django's EmailMessage
object instances.
:rtype: int
:returns: The number of EmailMessage objects that were successfully
queued up. Note that these are not in a state where we can
guarantee delivery just yet.
"""
num_sent = 0
for message in email_messages:
# Hand this off to a celery task.
SendEmailTask.delay(
message.from_email,
message.recipients(),
- message.message().as_string(),
+ message.message().as_string().decode('utf8'),
? +++++++++++++++
)
num_sent += 1
return num_sent
-
|
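The Python 2 background for the decode call: Message.as_string() returns a byte string there, and decoding it up front hands Celery a unicode object that serialises cleanly. A version-agnostic sketch (the MIMEText construction is an assumption about how the message was built):
from email.mime.text import MIMEText
msg = MIMEText(u'h\xe9llo', 'plain', 'utf-8')
raw = msg.as_string()
# Python 2: as_string() returns a byte str; Python 3: already text.
text = raw.decode('utf8') if isinstance(raw, bytes) else raw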
e4427016abdc7ef146cd7550f2ac1dace07be442
|
plinky.py
|
plinky.py
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run()
|
Remove debug flag from app
|
Remove debug flag from app
|
Python
|
mit
|
RaspberryPiFoundation/plinky,CodeClub/plinky,codecleaner/plinky,codecleaner/plinky,CodeClub/plinky,martinpeck/plinky,martinpeck/plinky,RaspberryPiFoundation/plinky,RaspberryPiFoundation/plinky
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
- app.run(debug=True)
+ app.run()
|
Remove debug flag from app
|
## Code Before:
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run(debug=True)
## Instruction:
Remove debug flag from app
## Code After:
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run()
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
- app.run(debug=True)
+ app.run()
|
e22bf1a54d8b532f0a417221b04e382e71b29186
|
LiSE/LiSE/tests/test_examples.py
|
LiSE/LiSE/tests/test_examples.py
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
Add a test to catch that load error next time
|
Add a test to catch that load error next time
|
Python
|
agpl-3.0
|
LogicalDash/LiSE,LogicalDash/LiSE
|
+ from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
+ def test_char_stat_startup(tempdir):
+ with Engine(tempdir) as eng:
+ tri = eng.new_character('triangle')
+ sq = eng.new_character('square')
+
+ sq.stat['min_sameness'] = 0.1
+ assert 'min_sameness' in sq.stat
+ sq.stat['max_sameness'] = 0.9
+ assert 'max_sameness' in sq.stat
+ tri.stat['min_sameness'] = 0.2
+ assert 'min_sameness' in tri.stat
+ tri.stat['max_sameness'] = 0.8
+ assert 'max_sameness' in tri.stat
+
+ with Engine(tempdir) as eng:
+ assert 'min_sameness' in eng.character['square'].stat
+ assert 'max_sameness' in eng.character['square'].stat
+ assert 'min_sameness' in eng.character['triangle'].stat
+ assert 'max_sameness' in eng.character['triangle'].stat
+
+
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
Add a test to catch that load error next time
|
## Code Before:
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
## Instruction:
Add a test to catch that load error next time
## Code After:
from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_char_stat_startup(tempdir):
with Engine(tempdir) as eng:
tri = eng.new_character('triangle')
sq = eng.new_character('square')
sq.stat['min_sameness'] = 0.1
assert 'min_sameness' in sq.stat
sq.stat['max_sameness'] = 0.9
assert 'max_sameness' in sq.stat
tri.stat['min_sameness'] = 0.2
assert 'min_sameness' in tri.stat
tri.stat['max_sameness'] = 0.8
assert 'max_sameness' in tri.stat
with Engine(tempdir) as eng:
assert 'min_sameness' in eng.character['square'].stat
assert 'max_sameness' in eng.character['square'].stat
assert 'min_sameness' in eng.character['triangle'].stat
assert 'max_sameness' in eng.character['triangle'].stat
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
+ from LiSE import Engine
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
+ def test_char_stat_startup(tempdir):
+ with Engine(tempdir) as eng:
+ tri = eng.new_character('triangle')
+ sq = eng.new_character('square')
+
+ sq.stat['min_sameness'] = 0.1
+ assert 'min_sameness' in sq.stat
+ sq.stat['max_sameness'] = 0.9
+ assert 'max_sameness' in sq.stat
+ tri.stat['min_sameness'] = 0.2
+ assert 'min_sameness' in tri.stat
+ tri.stat['max_sameness'] = 0.8
+ assert 'max_sameness' in tri.stat
+
+ with Engine(tempdir) as eng:
+ assert 'min_sameness' in eng.character['square'].stat
+ assert 'max_sameness' in eng.character['square'].stat
+ assert 'min_sameness' in eng.character['triangle'].stat
+ assert 'max_sameness' in eng.character['triangle'].stat
+
+
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
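Editor's note: the tests in this record depend on `engy` and `tempdir` fixtures that the commit does not show. A minimal conftest.py sketch of what they plausibly look like — the fixture bodies here are assumptions inferred from how Engine is used in test_char_stat_startup, not part of the original repository:

import pytest
from LiSE import Engine

@pytest.fixture
def tempdir(tmp_path):
    # A fresh working directory per test; Engine takes it as its
    # first positional argument in test_char_stat_startup above.
    return str(tmp_path)

@pytest.fixture
def engy(tempdir):
    # Yield an open engine and close it afterwards so that any
    # on-disk state is flushed even when an assertion fails.
    with Engine(tempdir) as eng:
        yield eng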
4a509970cb48b64046f88193efc141344437b151
|
tests/test_list_struct.py
|
tests/test_list_struct.py
|
import pytest
from hypothesis import given
from hypothesis.strategies import lists, integers, floats, one_of, composite
from datatyping.datatyping import validate
def test_empty():
assert validate([], []) is None
@given(li=lists(integers()))
def test_plain(li):
assert validate([int], li) is None
@given(lst=lists(floats(), min_size=1))
def test_plain_type_error(lst):
with pytest.raises(TypeError):
validate([int], lst)
@given(lst=one_of(lists(integers(), min_size=5),
lists(integers(), max_size=3)))
def test_list_lengths(lst):
with pytest.raises(ValueError):
validate([int, int, int, str], lst)
@given(lst=lists(lists(integers())))
def test_nested(lst):
assert validate([[int]], lst)
with pytest.raises(TypeError):
validate([int], lst)
@composite
def heavy_nested_data(draw):
return [draw(lists(integers)), draw(floats()), lists(lists(floats()))]
@given(lst=heavy_nested_data())
def test_heavy_nested(lst):
assert validate([[int], float, [[float]]], lst) is None
with pytest.raises(TypeError):
assert validate([[str], int, int], lst)
with pytest.raises(ValueError):
validate([[[float]]], lst)
|
import pytest
from hypothesis import given
from hypothesis.strategies import lists, integers, floats, one_of, composite
from datatyping.datatyping import validate
def test_empty():
assert validate([], []) is None
@given(li=lists(integers()))
def test_plain(li):
assert validate([int], li) is None
@given(lst=lists(floats(), min_size=1))
def test_plain_type_error(lst):
with pytest.raises(TypeError):
validate([int], lst)
@given(lst=one_of(lists(integers(), min_size=5),
lists(integers(), max_size=3)))
def test_list_lengths(lst):
with pytest.raises(ValueError):
validate([int, int, int, str], lst)
@given(lst=lists(lists(integers(), min_size=1), min_size=1))
def test_nested(lst):
assert validate([[int]], lst) is None
with pytest.raises(TypeError):
validate([int], lst)
@composite
def heavy_nested_data(draw):
return [draw(lists(integers(), min_size=1, max_size=3)),
draw(floats()),
draw(lists(lists(floats(), min_size=1, max_size=3), min_size=1, max_size=3))]
@given(lst=heavy_nested_data())
def test_heavy_nested(lst):
assert validate([[int], float, [[float]]], lst) is None
with pytest.raises(TypeError):
validate([[str], int, int], lst)
with pytest.raises(TypeError):
validate([[[float]]], lst)
|
Fix up mistakes in tests
|
Fix up mistakes in tests
|
Python
|
mit
|
Zaab1t/datatyping
|
import pytest
from hypothesis import given
from hypothesis.strategies import lists, integers, floats, one_of, composite
from datatyping.datatyping import validate
def test_empty():
assert validate([], []) is None
@given(li=lists(integers()))
def test_plain(li):
assert validate([int], li) is None
@given(lst=lists(floats(), min_size=1))
def test_plain_type_error(lst):
with pytest.raises(TypeError):
validate([int], lst)
@given(lst=one_of(lists(integers(), min_size=5),
lists(integers(), max_size=3)))
def test_list_lengths(lst):
with pytest.raises(ValueError):
validate([int, int, int, str], lst)
- @given(lst=lists(lists(integers())))
+ @given(lst=lists(lists(integers(), min_size=1), min_size=1))
def test_nested(lst):
- assert validate([[int]], lst)
+ assert validate([[int]], lst) is None
with pytest.raises(TypeError):
validate([int], lst)
@composite
def heavy_nested_data(draw):
- return [draw(lists(integers)), draw(floats()), lists(lists(floats()))]
+ return [draw(lists(integers(), min_size=1, max_size=3)),
+ draw(floats()),
+ draw(lists(lists(floats(), min_size=1, max_size=3), min_size=1, max_size=3))]
@given(lst=heavy_nested_data())
def test_heavy_nested(lst):
assert validate([[int], float, [[float]]], lst) is None
with pytest.raises(TypeError):
- assert validate([[str], int, int], lst)
+ validate([[str], int, int], lst)
- with pytest.raises(ValueError):
+ with pytest.raises(TypeError):
validate([[[float]]], lst)
|
Fix up mistakes in tests
|
## Code Before:
import pytest
from hypothesis import given
from hypothesis.strategies import lists, integers, floats, one_of, composite
from datatyping.datatyping import validate
def test_empty():
assert validate([], []) is None
@given(li=lists(integers()))
def test_plain(li):
assert validate([int], li) is None
@given(lst=lists(floats(), min_size=1))
def test_plain_type_error(lst):
with pytest.raises(TypeError):
validate([int], lst)
@given(lst=one_of(lists(integers(), min_size=5),
lists(integers(), max_size=3)))
def test_list_lengths(lst):
with pytest.raises(ValueError):
validate([int, int, int, str], lst)
@given(lst=lists(lists(integers())))
def test_nested(lst):
assert validate([[int]], lst)
with pytest.raises(TypeError):
validate([int], lst)
@composite
def heavy_nested_data(draw):
return [draw(lists(integers)), draw(floats()), lists(lists(floats()))]
@given(lst=heavy_nested_data())
def test_heavy_nested(lst):
assert validate([[int], float, [[float]]], lst) is None
with pytest.raises(TypeError):
assert validate([[str], int, int], lst)
with pytest.raises(ValueError):
validate([[[float]]], lst)
## Instruction:
Fix up mistakes in tests
## Code After:
import pytest
from hypothesis import given
from hypothesis.strategies import lists, integers, floats, one_of, composite
from datatyping.datatyping import validate
def test_empty():
assert validate([], []) is None
@given(li=lists(integers()))
def test_plain(li):
assert validate([int], li) is None
@given(lst=lists(floats(), min_size=1))
def test_plain_type_error(lst):
with pytest.raises(TypeError):
validate([int], lst)
@given(lst=one_of(lists(integers(), min_size=5),
lists(integers(), max_size=3)))
def test_list_lengths(lst):
with pytest.raises(ValueError):
validate([int, int, int, str], lst)
@given(lst=lists(lists(integers(), min_size=1), min_size=1))
def test_nested(lst):
assert validate([[int]], lst) is None
with pytest.raises(TypeError):
validate([int], lst)
@composite
def heavy_nested_data(draw):
return [draw(lists(integers(), min_size=1, max_size=3)),
draw(floats()),
draw(lists(lists(floats(), min_size=1, max_size=3), min_size=1, max_size=3))]
@given(lst=heavy_nested_data())
def test_heavy_nested(lst):
assert validate([[int], float, [[float]]], lst) is None
with pytest.raises(TypeError):
validate([[str], int, int], lst)
with pytest.raises(TypeError):
validate([[[float]]], lst)
|
import pytest
from hypothesis import given
from hypothesis.strategies import lists, integers, floats, one_of, composite
from datatyping.datatyping import validate
def test_empty():
assert validate([], []) is None
@given(li=lists(integers()))
def test_plain(li):
assert validate([int], li) is None
@given(lst=lists(floats(), min_size=1))
def test_plain_type_error(lst):
with pytest.raises(TypeError):
validate([int], lst)
@given(lst=one_of(lists(integers(), min_size=5),
lists(integers(), max_size=3)))
def test_list_lengths(lst):
with pytest.raises(ValueError):
validate([int, int, int, str], lst)
- @given(lst=lists(lists(integers())))
+ @given(lst=lists(lists(integers(), min_size=1), min_size=1))
def test_nested(lst):
- assert validate([[int]], lst)
+ assert validate([[int]], lst) is None
? ++++++++
with pytest.raises(TypeError):
validate([int], lst)
@composite
def heavy_nested_data(draw):
- return [draw(lists(integers)), draw(floats()), lists(lists(floats()))]
+ return [draw(lists(integers(), min_size=1, max_size=3)),
+ draw(floats()),
+ draw(lists(lists(floats(), min_size=1, max_size=3), min_size=1, max_size=3))]
@given(lst=heavy_nested_data())
def test_heavy_nested(lst):
assert validate([[int], float, [[float]]], lst) is None
with pytest.raises(TypeError):
- assert validate([[str], int, int], lst)
? -------
+ validate([[str], int, int], lst)
- with pytest.raises(ValueError):
? ^^^^
+ with pytest.raises(TypeError):
? ^^^
validate([[[float]]], lst)
|
a8966a4d3f9a160af3865b8cadb26e58eb36fd64
|
src/database/__init__.py
|
src/database/__init__.py
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
session = None
def init_session(connection_string=None, drop=False):
if connection_string is None:
connection_string = 'sqlite://'
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
engine = create_engine(connection_string, echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool
session = None
def init_session(connection_string=None, drop=False):
if connection_string is None:
engine = create_engine('sqlite://',
echo=True,
connect_args={'check_same_thread':False},
poolclass=StaticPool)
else:
engine = create_engine(connection_string)
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
|
Fix the database session init to work with the flask debug server.
|
Fix the database session init to work with the flask debug server.
The debug webserver consists of two parts: the watcher that watches
the files for changes and the worker that is forked and will be restarted
after each modification. SQLAlchemy uses a SingletonThreadPool that will not
work with this if the database was initialized within the watcher.
See [1] for more detailed information.
[1] http://docs.sqlalchemy.org/en/rel_0_8/dialects/sqlite.html#threading-pooling-behavior
|
Python
|
bsd-3-clause
|
janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
+ from sqlalchemy.pool import StaticPool
session = None
def init_session(connection_string=None, drop=False):
if connection_string is None:
- connection_string = 'sqlite://'
+ engine = create_engine('sqlite://',
+ echo=True,
+ connect_args={'check_same_thread':False},
+ poolclass=StaticPool)
+ else:
+ engine = create_engine(connection_string)
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
- engine = create_engine(connection_string, echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
|
Fix the database session init to work with the flask debug server.
|
## Code Before:
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
session = None
def init_session(connection_string=None, drop=False):
if connection_string is None:
connection_string = 'sqlite://'
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
engine = create_engine(connection_string, echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
## Instruction:
Fix the database session init to work with the flask debug server.
## Code After:
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool
session = None
def init_session(connection_string=None, drop=False):
if connection_string is None:
engine = create_engine('sqlite://',
echo=True,
connect_args={'check_same_thread':False},
poolclass=StaticPool)
else:
engine = create_engine(connection_string)
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
+ from sqlalchemy.pool import StaticPool
session = None
def init_session(connection_string=None, drop=False):
if connection_string is None:
- connection_string = 'sqlite://'
+ engine = create_engine('sqlite://',
+ echo=True,
+ connect_args={'check_same_thread':False},
+ poolclass=StaticPool)
+ else:
+ engine = create_engine(connection_string)
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
- engine = create_engine(connection_string, echo=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
|
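Editor's note: the reason StaticPool matters here is that each new connection to an in-memory SQLite URL normally gets its own empty database, so a forked or multi-threaded server loses the schema created elsewhere. A standalone sketch of the behaviour the commit relies on (illustrative only, using the modern SQLAlchemy API):

from sqlalchemy import create_engine, text
from sqlalchemy.pool import StaticPool

engine = create_engine(
    'sqlite://',
    connect_args={'check_same_thread': False},
    poolclass=StaticPool,  # every checkout reuses one connection
)

# Create a table on one checkout...
with engine.begin() as conn:
    conn.execute(text('CREATE TABLE foo (id INTEGER PRIMARY KEY)'))

# ...and a later checkout still sees it, because StaticPool hands
# back the same underlying in-memory database.
with engine.connect() as conn:
    assert conn.execute(text('SELECT COUNT(*) FROM foo')).scalar() == 0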
7060e3f1b1e8bda4c96cdc4b0c84ae344ac81c76
|
Sketches/MPS/test/test_Selector.py
|
Sketches/MPS/test/test_Selector.py
|
import unittest
import sys; sys.path.append("../")
from Selector import Selector
if __name__=="__main__":
unittest.main()
|
import unittest
import sys; sys.path.append("../")
from Selector import Selector
class SmokeTests_Selector(unittest.TestCase):
def test_SmokeTest(self):
"""__init__ - Called with no arguments succeeds"""
S = Selector()
self.assert_(isinstance(S, Axon.Component.component))
if __name__=="__main__":
unittest.main()
|
Add the most basic smoke test. We check that the resulting object is at least a minimal component.
|
Add the most basic smoke test. We check that the resulting object is at least a minimal component.
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
import unittest
import sys; sys.path.append("../")
from Selector import Selector
+ class SmokeTests_Selector(unittest.TestCase):
+ def test_SmokeTest(self):
+ """__init__ - Called with no arguments succeeds"""
+ S = Selector()
+ self.assert_(isinstance(S, Axon.Component.component))
+
if __name__=="__main__":
unittest.main()
|
Add the most basic smoke test. We check that the resulting object is at least a minimal component.
|
## Code Before:
import unittest
import sys; sys.path.append("../")
from Selector import Selector
if __name__=="__main__":
unittest.main()
## Instruction:
Add the most basic smoke test. We check that the resulting object is at least a minimal component.
## Code After:
import unittest
import sys; sys.path.append("../")
from Selector import Selector
class SmokeTests_Selector(unittest.TestCase):
def test_SmokeTest(self):
"""__init__ - Called with no arguments succeeds"""
S = Selector()
self.assert_(isinstance(S, Axon.Component.component))
if __name__=="__main__":
unittest.main()
|
import unittest
import sys; sys.path.append("../")
from Selector import Selector
+ class SmokeTests_Selector(unittest.TestCase):
+ def test_SmokeTest(self):
+ """__init__ - Called with no arguments succeeds"""
+ S = Selector()
+ self.assert_(isinstance(S, Axon.Component.component))
+
if __name__=="__main__":
unittest.main()
|
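Editor's note: as committed, the test body references Axon.Component.component without importing Axon, so running it raises NameError instead of exercising the assertion. A sketch of the presumably intended file — the added import is an assumption, and assertTrue replaces the long-deprecated assert_:

import unittest
import sys; sys.path.append("../")

import Axon.Component  # missing from the commit above

from Selector import Selector

class SmokeTests_Selector(unittest.TestCase):
    def test_SmokeTest(self):
        """__init__ - Called with no arguments succeeds"""
        S = Selector()
        self.assertTrue(isinstance(S, Axon.Component.component))

if __name__ == "__main__":
    unittest.main()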
d5a3b6e1eb37883a16c7e98d2a1b7c98d8d67051
|
layout/tests.py
|
layout/tests.py
|
from django.core.urlresolvers import resolve
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
|
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.template.loader import render_to_string
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
def test_home_page_returns_correct_html(self):
expected_html = render_to_string('home.html')
request = HttpRequest()
response = home(request)
actual_html = response.content.decode()
self.assertEqual(actual_html, expected_html)
|
Add test for home page html content
|
Add test for home page html content
|
Python
|
mit
|
jvanbrug/scout,jvanbrug/scout
|
from django.core.urlresolvers import resolve
+ from django.http import HttpRequest
+ from django.template.loader import render_to_string
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
+ def test_home_page_returns_correct_html(self):
+ expected_html = render_to_string('home.html')
+ request = HttpRequest()
+ response = home(request)
+ actual_html = response.content.decode()
+ self.assertEqual(actual_html, expected_html)
+
|
Add test for home page html content
|
## Code Before:
from django.core.urlresolvers import resolve
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
## Instruction:
Add test for home page html content
## Code After:
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.template.loader import render_to_string
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
def test_home_page_returns_correct_html(self):
expected_html = render_to_string('home.html')
request = HttpRequest()
response = home(request)
actual_html = response.content.decode()
self.assertEqual(actual_html, expected_html)
|
from django.core.urlresolvers import resolve
+ from django.http import HttpRequest
+ from django.template.loader import render_to_string
from django.test import TestCase
from layout.views import home
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page(self):
found = resolve('/')
self.assertEqual(found.func, home)
+
+ def test_home_page_returns_correct_html(self):
+ expected_html = render_to_string('home.html')
+ request = HttpRequest()
+ response = home(request)
+ actual_html = response.content.decode()
+ self.assertEqual(actual_html, expected_html)
|
9a896de52b353e17a4216fdaf1342275e1ecc30a
|
autoconf/raw.py
|
autoconf/raw.py
|
from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
|
from _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
Remove gomp dependency for Raw win build
|
Remove gomp dependency for Raw win build
|
Python
|
mit
|
tuttleofx/sconsProject
|
from _external import *
from m import *
+ if not windows:
- from gomp import *
+ from gomp import *
from lcms import *
+
+
+ if windows:
+ tmpDep = [
+ m,
+ lcms,
+ ]
+ else:
+ tmpDep = [
+ m,
+ gomp,
+ lcms,
+ ]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
- dependencies = [
+ dependencies = tmpDep
- m,
- gomp,
- lcms,
- ]
)
|
Remove gomp dependency for Raw win build
|
## Code Before:
from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
## Instruction:
Remove gomp dependency for Raw win build
## Code After:
from _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
from _external import *
from m import *
+ if not windows:
- from gomp import *
+ from gomp import *
? ++++
from lcms import *
+
+
+ if windows:
+ tmpDep = [
+ m,
+ lcms,
+ ]
+ else:
+ tmpDep = [
+ m,
+ gomp,
+ lcms,
+ ]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
- dependencies = [
? ^
+ dependencies = tmpDep
? ^^^^^^
- m,
- gomp,
- lcms,
- ]
)
|
3964606d6f0e28b127af57b1d13c12b3352f861a
|
ggd/__main__.py
|
ggd/__main__.py
|
import sys
import argparse
from .__init__ import __version__
from . make_bash import add_make_bash
from . check_recipe import add_check_recipe
from . list_files import add_list_files
from . search import add_search
from . show_env import add_show_env
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog='ggd', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", help="Installed version",
action="version",
version="%(prog)s " + str(__version__))
sub = parser.add_subparsers(title='[sub-commands]', dest='command')
sub.required = True
add_make_bash(sub)
add_check_recipe(sub)
add_list_files(sub)
add_search(sub)
add_show_env(sub)
args = parser.parse_args(args)
args.func(parser, args)
if __name__ == "__main__":
sys.exit(main() or 0)
|
import sys
import argparse
from .__init__ import __version__
from . make_bash import add_make_bash
from . check_recipe import add_check_recipe
from . list_files import add_list_files
from . search import add_search
from . show_env import add_show_env
from . install import add_install
from . uninstall import add_uninstall
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog='ggd', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", help="Installed version",
action="version",
version="%(prog)s " + str(__version__))
sub = parser.add_subparsers(title='[sub-commands]', dest='command')
sub.required = True
add_make_bash(sub)
add_check_recipe(sub)
add_list_files(sub)
add_search(sub)
add_show_env(sub)
add_install(sub)
add_uninstall(sub)
args = parser.parse_args(args)
args.func(parser, args)
if __name__ == "__main__":
sys.exit(main() or 0)
|
Add installer and uninstaller to main
|
Add installer and uninstaller to main
|
Python
|
mit
|
gogetdata/ggd-cli,gogetdata/ggd-cli
|
import sys
import argparse
from .__init__ import __version__
from . make_bash import add_make_bash
from . check_recipe import add_check_recipe
from . list_files import add_list_files
from . search import add_search
from . show_env import add_show_env
+ from . install import add_install
+ from . uninstall import add_uninstall
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog='ggd', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", help="Installed version",
action="version",
version="%(prog)s " + str(__version__))
sub = parser.add_subparsers(title='[sub-commands]', dest='command')
sub.required = True
add_make_bash(sub)
add_check_recipe(sub)
add_list_files(sub)
add_search(sub)
add_show_env(sub)
+ add_install(sub)
+
+ add_uninstall(sub)
+
args = parser.parse_args(args)
args.func(parser, args)
if __name__ == "__main__":
sys.exit(main() or 0)
|
Add installer and uninstaller to main
|
## Code Before:
import sys
import argparse
from .__init__ import __version__
from . make_bash import add_make_bash
from . check_recipe import add_check_recipe
from . list_files import add_list_files
from . search import add_search
from . show_env import add_show_env
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog='ggd', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", help="Installed version",
action="version",
version="%(prog)s " + str(__version__))
sub = parser.add_subparsers(title='[sub-commands]', dest='command')
sub.required = True
add_make_bash(sub)
add_check_recipe(sub)
add_list_files(sub)
add_search(sub)
add_show_env(sub)
args = parser.parse_args(args)
args.func(parser, args)
if __name__ == "__main__":
sys.exit(main() or 0)
## Instruction:
Add installer and uninstaller to main
## Code After:
import sys
import argparse
from .__init__ import __version__
from . make_bash import add_make_bash
from . check_recipe import add_check_recipe
from . list_files import add_list_files
from . search import add_search
from . show_env import add_show_env
from . install import add_install
from . uninstall import add_uninstall
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog='ggd', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", help="Installed version",
action="version",
version="%(prog)s " + str(__version__))
sub = parser.add_subparsers(title='[sub-commands]', dest='command')
sub.required = True
add_make_bash(sub)
add_check_recipe(sub)
add_list_files(sub)
add_search(sub)
add_show_env(sub)
add_install(sub)
add_uninstall(sub)
args = parser.parse_args(args)
args.func(parser, args)
if __name__ == "__main__":
sys.exit(main() or 0)
|
import sys
import argparse
from .__init__ import __version__
from . make_bash import add_make_bash
from . check_recipe import add_check_recipe
from . list_files import add_list_files
from . search import add_search
from . show_env import add_show_env
+ from . install import add_install
+ from . uninstall import add_uninstall
def main(args=None):
if args is None:
args = sys.argv[1:]
parser = argparse.ArgumentParser(prog='ggd', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-v", "--version", help="Installed version",
action="version",
version="%(prog)s " + str(__version__))
sub = parser.add_subparsers(title='[sub-commands]', dest='command')
sub.required = True
add_make_bash(sub)
add_check_recipe(sub)
add_list_files(sub)
add_search(sub)
add_show_env(sub)
+ add_install(sub)
+
+ add_uninstall(sub)
+
args = parser.parse_args(args)
args.func(parser, args)
if __name__ == "__main__":
sys.exit(main() or 0)
|
04bccd678ba7a67373b94695d7d87d0cf95dffd6
|
tests/unit/app_unit_test.py
|
tests/unit/app_unit_test.py
|
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
Set default locale in test to avoid test failures when a different default is used than expected.
|
Set default locale in test to avoid test failures when a different default is used than expected.
|
Python
|
mit
|
BMeu/Orchard,BMeu/Orchard
|
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
+ app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
Set default locale in test to avoid test failures when a different default is used than expected.
|
## Code Before:
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
## Instruction:
Set default locale in test to avoid test failures when a different default is used than expected.
## Code After:
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
import unittest
import orchard
class AppUnitTest(unittest.TestCase):
def setUp(self):
app = orchard.create_app('Testing')
+ app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app_context = app.app_context()
self.app_context.push()
self.client = app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
def test_index(self):
response = self.client.get('/')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard!' in data)
response = self.client.get('/BMeu')
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue('Welcome to the Orchard, BMeu!' in data)
|
8a7b6be29b3a839ba8e5c2cb33322d90d51d5fc4
|
karbor/tests/unit/conf_fixture.py
|
karbor/tests/unit/conf_fixture.py
|
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
|
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
|
Fix loading 'provider_config_dir' opt error
|
Fix loading 'provider_config_dir' opt error
When run unit test using 'ostestr --pdb' command. it may get
an error that can not find the config opt 'provider_config_dir'.
Change-Id: Ibc1c693a1531c791ad434ff56ee349ba3afb3d63
Closes-Bug: #1649443
|
Python
|
apache-2.0
|
openstack/smaug,openstack/smaug
|
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
+ CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
|
Fix loading 'provider_config_dir' opt error
|
## Code Before:
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
## Instruction:
Fix loading 'provider_config_dir' opt error
## Code After:
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
|
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.import_opt('policy_file', 'karbor.policy', group='oslo_policy')
+ CONF.import_opt('provider_config_dir', 'karbor.services.protection.provider')
def set_defaults(conf):
conf.set_default('connection', 'sqlite://', group='database')
conf.set_default('sqlite_synchronous', False, group='database')
conf.set_default('policy_file', 'karbor.tests.unit/policy.json',
group='oslo_policy')
conf.set_default('policy_dirs', [], group='oslo_policy')
conf.set_default('auth_strategy', 'noauth')
conf.set_default('state_path', os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..')))
conf.set_default('provider_config_dir',
os.path.join(os.path.dirname(__file__), 'fake_providers'))
|
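Editor's note: in oslo.config, set_default() raises NoSuchOptError unless the option has already been registered; import_opt() imports the module whose import side effect registers it, which is exactly the load error this commit fixes. A standalone sketch of the mechanism:

from oslo_config import cfg

conf = cfg.ConfigOpts()

try:
    # Fails: nothing has registered the option yet -- the same
    # failure mode the commit message describes under ostestr.
    conf.set_default('provider_config_dir', '/tmp/providers')
except cfg.NoSuchOptError:
    pass

# Registration normally happens as an import side effect of the
# module named in CONF.import_opt(); doing it directly here:
conf.register_opt(cfg.StrOpt('provider_config_dir'))
conf.set_default('provider_config_dir', '/tmp/providers')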
d1a784ec841f4f0fbe8945bf7a5f81e7c3952b93
|
plugin_handler.py
|
plugin_handler.py
|
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
Disable more plugins to make it work again.
|
Disable more plugins to make it work again.
Will fix venues parsing later.
Signed-off-by: Ville Valkonen <[email protected]>
|
Python
|
isc
|
weezel/BandEventNotifier
|
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
- blocklist = ["plugin_tiketti", "plugin_telakka"]
+ blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
Disable more plugins to make it work again.
|
## Code Before:
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
## Instruction:
Disable more plugins to make it work again.
## Code After:
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
import os
import pkgutil
import sys
from typing import List
from venues.abstract_venue import AbstractVenue
def load_venue_plugins() -> List[AbstractVenue]:
"""
Read plugin directory and load found plugins.
Variable "blocklist" can be used to exclude loading certain plugins.
"""
- blocklist = ["plugin_tiketti", "plugin_telakka"]
+ blocklist = ["plugin_tiketti", "plugin_telakka", "plugin_lutakko", "plugin_yotalo"]
found_blocked = list()
loadedplugins = list()
pluginspathabs = os.path.join(os.path.dirname(__file__), "venues")
for loader, plugname, ispkg in \
pkgutil.iter_modules(path=[pluginspathabs]):
if plugname in sys.modules or plugname == "abstract_venue":
continue
if plugname in blocklist:
found_blocked.append(plugname.lstrip("plugin_"))
continue
plugpath = f"venues.{plugname}"
loadplug = __import__(plugpath, fromlist=[plugname])
classname = plugname.split("_")[1].title()
loadedclass = getattr(loadplug, classname)
instance = loadedclass()
loadedplugins.append(instance)
print(f"Loaded plugin: {instance.get_venue_name()}")
print("Blocked plugins: {}.\n".format(", ".join(found_blocked[1:])))
return loadedplugins
|
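Editor's note: str.lstrip() strips a set of characters rather than a literal prefix, so plugname.lstrip("plugin_") above mangles names whose body starts with any of those letters — e.g. 'plugin_lutakko' becomes 'takko', not 'lutakko'. (The found_blocked[1:] slice in the summary line also silently drops the first blocked name.) A sketch of a prefix-safe alternative; str.removeprefix needs Python 3.9+, and the fallback covers older versions:

def strip_plugin_prefix(name: str) -> str:
    # Removes the literal 'plugin_' prefix only, nothing else.
    if hasattr(str, 'removeprefix'):  # Python 3.9+
        return name.removeprefix('plugin_')
    return name[len('plugin_'):] if name.startswith('plugin_') else name

assert strip_plugin_prefix('plugin_lutakko') == 'lutakko'
assert 'plugin_lutakko'.lstrip('plugin_') == 'takko'  # the lstrip pitfall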
b5fc62d022cd773a0333560f30d8c8c0d6dbd25e
|
txircd/utils.py
|
txircd/utils.py
|
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
|
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
|
Add a ModeType enum for later benefit
|
Add a ModeType enum for later benefit
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
+ def _enum(**enums):
+ return type('Enum', (), enums)
+
+ ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
+
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
|
Add a ModeType enum for later benefit
|
## Code Before:
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
## Instruction:
Add a ModeType enum for later benefit
## Code After:
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
|
+ def _enum(**enums):
+ return type('Enum', (), enums)
+
+ ModeType = _enum(List=0, ParamOnUnset=1, Param=2, NoParam=3, Status=4)
+
def unescapeEndpointDescription(desc):
result = []
escape = []
depth = 0
desc = iter(desc)
for char in desc:
if char == "\\":
try:
char = desc.next()
except StopIteration:
raise ValueError ("Endpoint description not valid: escaped end of string")
if char not in "{}":
char = "\\{}".format(char)
if depth == 0:
result.extend(char)
else:
escape.extend(char)
elif char == "{":
if depth > 0:
escape.append("{")
depth += 1
elif char == "}":
depth -= 1
if depth < 0:
raise ValueError ("Endpoint description not valid: mismatched end brace")
if depth == 0:
result.extend(unescapeEndpointDescription("".join(escape)).replace("\\", "\\\\").replace(":", "\\:").replace("=", "\\="))
else:
escape.append("}")
else:
if depth == 0:
result.append(char)
else:
escape.append(char)
if depth != 0:
raise ValueError ("Endpoint description not valid: mismatched opening brace")
return "".join(result)
|
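Editor's note: the _enum factory above is a common pre-stdlib-enum idiom. An equivalent sketch using enum.IntEnum, whose members still compare equal to the plain ints the rest of a codebase may pass around — offered as an alternative, not what the commit does:

from enum import IntEnum

class ModeType(IntEnum):
    List = 0
    ParamOnUnset = 1
    Param = 2
    NoParam = 3
    Status = 4

assert ModeType.Param == 2             # interoperates with raw ints
assert ModeType(4) is ModeType.Status  # reverse lookup for free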
f58940027a0e152ba68917a4b85dd1dfed1095a9
|
appname/server.py
|
appname/server.py
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
Add data: and unsafe-inline for base64 fonts and inline js
|
Add data: and unsafe-inline for base64 fonts and inline js
|
Python
|
mit
|
LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples,LandRegistry-Attic/flask-examples
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
- response.headers.add('Content-Security-Policy', "default-src 'self'")
+ response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
Add data: and unsafe-inline for base64 fonts and inline js
|
## Code Before:
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self'")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
## Instruction:
Add data: and unsafe-inline for base64 fonts and inline js
## Code After:
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
from flask import render_template
from appname import app, db
from models import Foo
from flask.ext.assets import Environment, Bundle
# Static assets
assets = Environment(app)
css_main = Bundle(
'stylesheets/main.scss',
filters='scss',
output='build/main.css',
depends="**/*.scss"
)
assets.register('css_main', css_main)
# govuk_template asset path
@app.context_processor
def asset_path_context_processor():
return {'asset_path': '/static/govuk_template/'}
@app.route('/')
def index():
return render_template("index.html")
# Some useful headers to set to beef up the robustness of the app
# https://www.owasp.org/index.php/List_of_useful_HTTP_headers
@app.after_request
def after_request(response):
- response.headers.add('Content-Security-Policy', "default-src 'self'")
+ response.headers.add('Content-Security-Policy', "default-src 'self' 'unsafe-inline' data:")
? ++++++++++++++++++++++
response.headers.add('X-Frame-Options', 'deny')
response.headers.add('X-Content-Type-Options', 'nosniff')
response.headers.add('X-XSS-Protection', '1; mode=block')
return response
|
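The relaxed policy can be sanity-checked with Flask's test client. A minimal sketch, independent of the repository above; the route and the assertions are illustrative assumptions:

from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'ok'

@app.after_request
def add_csp(response):
    # Same relaxed policy as the commit: same-origin content plus
    # inline scripts/styles and data: URIs (base64-encoded fonts).
    response.headers['Content-Security-Policy'] = "default-src 'self' 'unsafe-inline' data:"
    return response

with app.test_client() as client:
    csp = client.get('/').headers['Content-Security-Policy']
    assert "'unsafe-inline'" in csp and 'data:' in csp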
eb1d581a94f87feb2bc09dbf45b13de282a205e8
|
pyqode/json/modes/autocomplete.py
|
pyqode/json/modes/autocomplete.py
|
from pyqode.core import modes
from pyqode.core.api import TextHelper
class AutoCompleteMode(modes.AutoCompleteMode):
def __init__(self):
super(AutoCompleteMode, self).__init__()
self.QUOTES_FORMATS.pop("'")
self.SELECTED_QUOTES_FORMATS.pop("'")
self.MAPPING.pop("'")
def _on_key_pressed(self, event):
helper = TextHelper(self.editor)
indent = helper.line_indent() * ' '
if self.editor.textCursor().positionInBlock() == len(indent):
self.QUOTES_FORMATS['"'] = '%s:'
else:
self.QUOTES_FORMATS['"'] = '%s'
self.QUOTES_FORMATS['{'] = '\n' + indent + '%s'
self.QUOTES_FORMATS['['] = '\n' + indent + '%s'
super(AutoCompleteMode, self)._on_key_pressed(event)
|
from pyqode.core import modes
from pyqode.core.api import TextHelper
class AutoCompleteMode(modes.AutoCompleteMode):
def __init__(self):
super(AutoCompleteMode, self).__init__()
try:
self.QUOTES_FORMATS.pop("'")
self.SELECTED_QUOTES_FORMATS.pop("'")
self.MAPPING.pop("'")
except KeyError:
pass
def _on_key_pressed(self, event):
helper = TextHelper(self.editor)
indent = helper.line_indent() * ' '
if self.editor.textCursor().positionInBlock() == len(indent):
self.QUOTES_FORMATS['"'] = '%s:'
else:
self.QUOTES_FORMATS['"'] = '%s'
self.QUOTES_FORMATS['{'] = '\n' + indent + '%s'
self.QUOTES_FORMATS['['] = '\n' + indent + '%s'
super(AutoCompleteMode, self)._on_key_pressed(event)
|
Fix autocomplete KeyError when more than one editor has been created
|
Fix autocomplete KeyError when more than one editor has been created
|
Python
|
mit
|
pyQode/pyqode.json,pyQode/pyqode.json
|
from pyqode.core import modes
from pyqode.core.api import TextHelper
class AutoCompleteMode(modes.AutoCompleteMode):
def __init__(self):
super(AutoCompleteMode, self).__init__()
+ try:
- self.QUOTES_FORMATS.pop("'")
+ self.QUOTES_FORMATS.pop("'")
- self.SELECTED_QUOTES_FORMATS.pop("'")
+ self.SELECTED_QUOTES_FORMATS.pop("'")
- self.MAPPING.pop("'")
+ self.MAPPING.pop("'")
+ except KeyError:
+ pass
def _on_key_pressed(self, event):
helper = TextHelper(self.editor)
indent = helper.line_indent() * ' '
if self.editor.textCursor().positionInBlock() == len(indent):
self.QUOTES_FORMATS['"'] = '%s:'
else:
self.QUOTES_FORMATS['"'] = '%s'
self.QUOTES_FORMATS['{'] = '\n' + indent + '%s'
self.QUOTES_FORMATS['['] = '\n' + indent + '%s'
super(AutoCompleteMode, self)._on_key_pressed(event)
|
Fix autocomplete KeyError when more than one editor has been created
|
## Code Before:
from pyqode.core import modes
from pyqode.core.api import TextHelper
class AutoCompleteMode(modes.AutoCompleteMode):
def __init__(self):
super(AutoCompleteMode, self).__init__()
self.QUOTES_FORMATS.pop("'")
self.SELECTED_QUOTES_FORMATS.pop("'")
self.MAPPING.pop("'")
def _on_key_pressed(self, event):
helper = TextHelper(self.editor)
indent = helper.line_indent() * ' '
if self.editor.textCursor().positionInBlock() == len(indent):
self.QUOTES_FORMATS['"'] = '%s:'
else:
self.QUOTES_FORMATS['"'] = '%s'
self.QUOTES_FORMATS['{'] = '\n' + indent + '%s'
self.QUOTES_FORMATS['['] = '\n' + indent + '%s'
super(AutoCompleteMode, self)._on_key_pressed(event)
## Instruction:
Fix autocomplete KeyError when more than one editor has been created
## Code After:
from pyqode.core import modes
from pyqode.core.api import TextHelper
class AutoCompleteMode(modes.AutoCompleteMode):
def __init__(self):
super(AutoCompleteMode, self).__init__()
try:
self.QUOTES_FORMATS.pop("'")
self.SELECTED_QUOTES_FORMATS.pop("'")
self.MAPPING.pop("'")
except KeyError:
pass
def _on_key_pressed(self, event):
helper = TextHelper(self.editor)
indent = helper.line_indent() * ' '
if self.editor.textCursor().positionInBlock() == len(indent):
self.QUOTES_FORMATS['"'] = '%s:'
else:
self.QUOTES_FORMATS['"'] = '%s'
self.QUOTES_FORMATS['{'] = '\n' + indent + '%s'
self.QUOTES_FORMATS['['] = '\n' + indent + '%s'
super(AutoCompleteMode, self)._on_key_pressed(event)
|
from pyqode.core import modes
from pyqode.core.api import TextHelper
class AutoCompleteMode(modes.AutoCompleteMode):
def __init__(self):
super(AutoCompleteMode, self).__init__()
+ try:
- self.QUOTES_FORMATS.pop("'")
+ self.QUOTES_FORMATS.pop("'")
? ++++
- self.SELECTED_QUOTES_FORMATS.pop("'")
+ self.SELECTED_QUOTES_FORMATS.pop("'")
? ++++
- self.MAPPING.pop("'")
+ self.MAPPING.pop("'")
? ++++
+ except KeyError:
+ pass
def _on_key_pressed(self, event):
helper = TextHelper(self.editor)
indent = helper.line_indent() * ' '
if self.editor.textCursor().positionInBlock() == len(indent):
self.QUOTES_FORMATS['"'] = '%s:'
else:
self.QUOTES_FORMATS['"'] = '%s'
self.QUOTES_FORMATS['{'] = '\n' + indent + '%s'
self.QUOTES_FORMATS['['] = '\n' + indent + '%s'
super(AutoCompleteMode, self)._on_key_pressed(event)
|
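The underlying failure is generic Python rather than pyqode-specific: QUOTES_FORMATS and friends are class-level dicts, so the first editor's __init__ pops the key and the second editor's __init__ finds it already gone. A stand-alone reproduction of that failure mode, with illustrative names:

class BaseMode:
    MAPPING = {"'": "'", '"': '"'}   # shared by every instance

class StripSingleQuote(BaseMode):
    def __init__(self):
        # Without the guard, the second instantiation raises KeyError:
        # the class dict was already mutated by the first instance.
        try:
            self.MAPPING.pop("'")
        except KeyError:
            pass

first = StripSingleQuote()
second = StripSingleQuote()   # would raise KeyError without the try/except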
360bdaa2df7673bc2090476df077c86c6f7c5633
|
utils/exceptions.py
|
utils/exceptions.py
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
Change constructor to take the status code as the first positional argument
|
Change constructor to take the status code as the first positional argument
|
Python
|
mit
|
BeatButton/beattie-bot,BeatButton/beattie
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
- def __init__(self, *args, code=None, **kwargs):
+ def __init__(self, code=None, *args):
self.code = code
- super().__init__(*args, **kwargs)
+ super().__init__(code, *args)
|
Change constructor to take the status code as the first positional argument
|
## Code Before:
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, *args, code=None, **kwargs):
self.code = code
super().__init__(*args, **kwargs)
## Instruction:
Change constructor to take the status code as the first positional argument
## Code After:
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
def __init__(self, code=None, *args):
self.code = code
super().__init__(code, *args)
|
class ResponseError(Exception):
"""For throwing in case of a non-200 response status."""
- def __init__(self, *args, code=None, **kwargs):
? ------- ---
+ def __init__(self, code=None, *args):
self.code = code
- super().__init__(*args, **kwargs)
? ----------
+ super().__init__(code, *args)
? ++++++
|
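With the new signature the status code travels both as an attribute and as the first Exception argument, so str(exc) now includes it. A usage sketch; the 404 flow is hypothetical:

class ResponseError(Exception):
    """For throwing in case of a non-200 response status."""
    def __init__(self, code=None, *args):
        self.code = code
        super().__init__(code, *args)

try:
    raise ResponseError(404, 'resource missing')
except ResponseError as exc:
    assert exc.code == 404
    assert exc.args == (404, 'resource missing')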
8f93bad77371fbc0d7dc75548472c7715eb8a2ee
|
climlab/tests/test_rcm.py
|
climlab/tests/test_rcm.py
|
from __future__ import division
import numpy as np
import climlab
import pytest
@pytest.fixture()
def rcm():
# initial state (temperatures)
state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
## Create individual physical process models:
# fixed relative humidity
h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
# Hard convective adjustment
convadj = climlab.convection.ConvectiveAdjustment(state=state, name='ConvectiveAdjustment',
adj_lapse_rate=6.5)
# CAM3 radiation with default parameters and interactive water vapor
rad = climlab.radiation.RRTMG(state=state, albedo=alb, specific_humidity=h2o.q, name='Radiation')
# Couple the models
rcm = climlab.couple([h2o,convadj,rad], name='RCM')
return rcm
def test_convective_adjustment(rcm):
rcm.step_forward()
# test non-scalar critical lapse rate
num_lev = rcm.lev.size
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
rcm.step_forward()
# test pseudoadiabatic critical lapse rate
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
rcm.step_forward()
|
from __future__ import division
import numpy as np
import climlab
import pytest
@pytest.fixture()
def rcm():
# initial state (temperatures)
state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
## Create individual physical process models:
# fixed relative humidity
h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
# Hard convective adjustment
convadj = climlab.convection.ConvectiveAdjustment(state=state, name='ConvectiveAdjustment',
adj_lapse_rate=6.5)
# CAM3 radiation with default parameters and interactive water vapor
rad = climlab.radiation.RRTMG(state=state, albedo=alb, specific_humidity=h2o.q, name='Radiation')
# Couple the models
rcm = climlab.couple([h2o,convadj,rad], name='RCM')
return rcm
@pytest.mark.fast
def test_convective_adjustment(rcm):
rcm.step_forward()
# test non-scalar critical lapse rate
num_lev = rcm.lev.size
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
rcm.step_forward()
# test pseudoadiabatic critical lapse rate
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
rcm.step_forward()
|
Mark rcm test as fast so it executes during build and test
|
Mark rcm test as fast so it executes during build and test
|
Python
|
mit
|
cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab,cjcardinale/climlab,brian-rose/climlab
|
from __future__ import division
import numpy as np
import climlab
import pytest
@pytest.fixture()
def rcm():
# initial state (temperatures)
state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
## Create individual physical process models:
# fixed relative humidity
h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
# Hard convective adjustment
convadj = climlab.convection.ConvectiveAdjustment(state=state, name='ConvectiveAdjustment',
adj_lapse_rate=6.5)
# CAM3 radiation with default parameters and interactive water vapor
rad = climlab.radiation.RRTMG(state=state, albedo=alb, specific_humidity=h2o.q, name='Radiation')
# Couple the models
rcm = climlab.couple([h2o,convadj,rad], name='RCM')
return rcm
+ @pytest.mark.fast
def test_convective_adjustment(rcm):
rcm.step_forward()
# test non-scalar critical lapse rate
num_lev = rcm.lev.size
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
rcm.step_forward()
# test pseudoadiabatic critical lapse rate
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
rcm.step_forward()
|
Mark rcm test as fast so it executes during build and test
|
## Code Before:
from __future__ import division
import numpy as np
import climlab
import pytest
@pytest.fixture()
def rcm():
# initial state (temperatures)
state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
## Create individual physical process models:
# fixed relative humidity
h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
# Hard convective adjustment
convadj = climlab.convection.ConvectiveAdjustment(state=state, name='ConvectiveAdjustment',
adj_lapse_rate=6.5)
# CAM3 radiation with default parameters and interactive water vapor
rad = climlab.radiation.RRTMG(state=state, albedo=alb, specific_humidity=h2o.q, name='Radiation')
# Couple the models
rcm = climlab.couple([h2o,convadj,rad], name='RCM')
return rcm
def test_convective_adjustment(rcm):
rcm.step_forward()
# test non-scalar critical lapse rate
num_lev = rcm.lev.size
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
rcm.step_forward()
# test pseudoadiabatic critical lapse rate
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
rcm.step_forward()
## Instruction:
Mark rcm test as fast so it executes during build and test
## Code After:
from __future__ import division
import numpy as np
import climlab
import pytest
@pytest.fixture()
def rcm():
# initial state (temperatures)
state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
## Create individual physical process models:
# fixed relative humidity
h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
# Hard convective adjustment
convadj = climlab.convection.ConvectiveAdjustment(state=state, name='ConvectiveAdjustment',
adj_lapse_rate=6.5)
# CAM3 radiation with default parameters and interactive water vapor
rad = climlab.radiation.RRTMG(state=state, albedo=alb, specific_humidity=h2o.q, name='Radiation')
# Couple the models
rcm = climlab.couple([h2o,convadj,rad], name='RCM')
return rcm
@pytest.mark.fast
def test_convective_adjustment(rcm):
rcm.step_forward()
# test non-scalar critical lapse rate
num_lev = rcm.lev.size
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
rcm.step_forward()
# test pseudoadiabatic critical lapse rate
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
rcm.step_forward()
|
from __future__ import division
import numpy as np
import climlab
import pytest
@pytest.fixture()
def rcm():
# initial state (temperatures)
state = climlab.column_state(num_lev=num_lev, num_lat=1, water_depth=5.)
## Create individual physical process models:
# fixed relative humidity
h2o = climlab.radiation.ManabeWaterVapor(state=state, name='H2O')
# Hard convective adjustment
convadj = climlab.convection.ConvectiveAdjustment(state=state, name='ConvectiveAdjustment',
adj_lapse_rate=6.5)
# CAM3 radiation with default parameters and interactive water vapor
rad = climlab.radiation.RRTMG(state=state, albedo=alb, specific_humidity=h2o.q, name='Radiation')
# Couple the models
rcm = climlab.couple([h2o,convadj,rad], name='RCM')
return rcm
+ @pytest.mark.fast
def test_convective_adjustment(rcm):
rcm.step_forward()
# test non-scalar critical lapse rate
num_lev = rcm.lev.size
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = np.linspace(5., 8., num_lev)
rcm.step_forward()
# test pseudoadiabatic critical lapse rate
rcm.subprocess['ConvectiveAdjustment'].adj_lapse_rate = 'pseudoadiabat'
rcm.step_forward()
|
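Selecting marked tests at build time relies on pytest's -m option, e.g. pytest -m fast. On newer pytest versions the custom marker should also be registered to avoid PytestUnknownMarkWarning; a sketch of doing that in a conftest.py, with an assumed description string:

# conftest.py
def pytest_configure(config):
    # Registers the marker so `pytest -m fast` selects it cleanly.
    config.addinivalue_line('markers', 'fast: quick tests run during build and test')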
024ea3b2e9e373abdcd78e44a163a2c32345073f
|
unittests.py
|
unittests.py
|
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
def callback( masterKey ):
self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
Extend UniformDH test to also verify the length of the shared master secret.
|
Extend UniformDH test to also verify the length of the shared master secret.
|
Python
|
bsd-3-clause
|
isislovecruft/scramblesuit,isislovecruft/scramblesuit
|
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
+ def callback( masterKey ):
+ self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
+
- self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
+ self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
Extend UniformDH test to also verify the length of the shared master secret.
|
## Code Before:
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
## Instruction:
Extend UniformDH test to also verify the length of the shared master secret.
## Code After:
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
def callback( masterKey ):
self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
import unittest
import const
import uniformdh
import obfsproxy.network.buffer as obfs_buf
class UniformDHTest( unittest.TestCase ):
def setUp( self ):
weAreServer = True
self.udh = uniformdh.new("A" * const.SHARED_SECRET_LENGTH, weAreServer)
def test1_createHandshake( self ):
handshake = self.udh.createHandshake()
self.failUnless((const.PUBLIC_KEY_LENGTH +
const.MARKER_LENGTH +
const.HMAC_LENGTH) <= len(handshake) <=
(const.MARKER_LENGTH +
const.HMAC_LENGTH +
const.MAX_PADDING_LENGTH))
def test2_receivePublicKey( self ):
buf = obfs_buf.Buffer(self.udh.createHandshake())
+ def callback( masterKey ):
+ self.failUnless(len(masterKey) == const.MASTER_KEY_LENGTH)
+
- self.failUnless(self.udh.receivePublicKey(buf, lambda x: x) == True)
? ^^^^^^^^^
+ self.failUnless(self.udh.receivePublicKey(buf, callback) == True)
? ++ ++ ^^
publicKey = self.udh.getRemotePublicKey()
self.failUnless(len(publicKey) == const.PUBLIC_KEY_LENGTH)
def test3_invalidHMAC( self ):
# Make the HMAC invalid.
handshake = self.udh.createHandshake()
if handshake[-1] != 'a':
handshake = handshake[:-1] + 'a'
else:
handshake = handshake[:-1] + 'b'
buf = obfs_buf.Buffer(handshake)
self.failIf(self.udh.receivePublicKey(buf, lambda x: x) == True)
if __name__ == '__main__':
unittest.main()
|
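The interesting pattern in test2 is asserting inside a closure, so the check runs on a value produced deep within the call under test. A self-contained sketch of the same idea using a stand-in key-derivation function:

import unittest

MASTER_KEY_LENGTH = 32

def receive_public_key(buf, callback):
    # stand-in for the real handshake: derive a key, hand it to the caller
    callback(b'\x00' * MASTER_KEY_LENGTH)
    return True

class CallbackAssertionTest(unittest.TestCase):
    def test_master_key_length(self):
        def callback(master_key):
            self.assertEqual(len(master_key), MASTER_KEY_LENGTH)
        self.assertTrue(receive_public_key(b'', callback))

if __name__ == '__main__':
    unittest.main()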
81ca54adbfdb605cd63674134144e058c46bab5f
|
nalaf/features/embeddings.py
|
nalaf/features/embeddings.py
|
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
self.model = Word2Vec.load(model_file)
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
token.features['embedding_{}'.format(index)] = self.weight * value.item()
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, additive=0, multiplicative=1):
self.model = Word2Vec.load(model_file)
self.additive = additive
self.multiplicative = multiplicative
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
token.features['embedding_{}'.format(index)] = (self.additive + value.item()) * self.multiplicative
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
Make WE use additive and multiplicative constants
|
Make WE use additive and multiplicative constants
|
Python
|
apache-2.0
|
Rostlab/nalaf
|
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
- def __init__(self, model_file, weight=1):
+ def __init__(self, model_file, additive=0, multiplicative=1):
self.model = Word2Vec.load(model_file)
- self.weight = weight
+ self.additive = additive
+ self.multiplicative = multiplicative
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
- token.features['embedding_{}'.format(index)] = self.weight * value.item()
+ token.features['embedding_{}'.format(index)] = (self.additive + value.item()) * self.multiplicative
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
Make WE use additive and multiplicative constants
|
## Code Before:
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
self.model = Word2Vec.load(model_file)
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
token.features['embedding_{}'.format(index)] = self.weight * value.item()
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
## Instruction:
Make WE use additive and multiplicative constants
## Code After:
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, additive=0, multiplicative=1):
self.model = Word2Vec.load(model_file)
self.additive = additive
self.multiplicative = multiplicative
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
token.features['embedding_{}'.format(index)] = (self.additive + value.item()) * self.multiplicative
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
- def __init__(self, model_file, weight=1):
? ^ ^^
+ def __init__(self, model_file, additive=0, multiplicative=1):
? ^^^^^^^ +++++++++ ^^^^^ +++
self.model = Word2Vec.load(model_file)
- self.weight = weight
+ self.additive = additive
+ self.multiplicative = multiplicative
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
- token.features['embedding_{}'.format(index)] = self.weight * value.item()
? ^ ---- ^
+ token.features['embedding_{}'.format(index)] = (self.additive + value.item()) * self.multiplicative
? + ^^^^^^^ ^ +++++++++++++++++++++++
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
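The change generalises a single scale factor to an affine transform, (additive + value) * multiplicative, which can for instance shift features into a non-negative range before scaling. A toy numeric illustration:

vector = [0.5, -0.25, 0.0]          # toy embedding values

weight = 2                           # old behaviour: scale only
assert [weight * v for v in vector] == [1.0, -0.5, 0.0]

additive, multiplicative = 1, 2      # new behaviour: shift, then scale
assert [(additive + v) * multiplicative for v in vector] == [3.0, 1.5, 2.0]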
1db07b9a534e533200f83de4f86d854d0bcda087
|
examples/exotica_examples/tests/runtest.py
|
examples/exotica_examples/tests/runtest.py
|
import subprocess
import os
import sys
cpptests = ['test_initializers',
'test_maps'
]
pytests = ['core.py',
'valkyrie_com.py',
'valkyrie_collision_check_fcl_default.py',
'valkyrie_collision_check_fcl_latest.py'
]
for test in cpptests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if process.wait()!=0:
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
for test in pytests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if output[-1][0:11]!='>>SUCCESS<<':
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
|
import subprocess
import os
import sys
cpptests = ['test_initializers',
'test_maps'
]
pytests = ['core.py',
'valkyrie_com.py',
'valkyrie_collision_check_fcl_default.py',
'valkyrie_collision_check_fcl_latest.py',
'collision_scene_distances.py'
]
for test in cpptests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if process.wait()!=0:
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
for test in pytests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if output[-1][0:11]!='>>SUCCESS<<':
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
|
Add collision_scene_distances to set of tests to run
|
Add collision_scene_distances to set of tests to run
|
Python
|
bsd-3-clause
|
openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica
|
import subprocess
import os
import sys
cpptests = ['test_initializers',
'test_maps'
]
pytests = ['core.py',
'valkyrie_com.py',
'valkyrie_collision_check_fcl_default.py',
- 'valkyrie_collision_check_fcl_latest.py'
+ 'valkyrie_collision_check_fcl_latest.py',
+ 'collision_scene_distances.py'
]
for test in cpptests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if process.wait()!=0:
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
for test in pytests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if output[-1][0:11]!='>>SUCCESS<<':
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
|
Add collision_scene_distances to set of tests to run
|
## Code Before:
import subprocess
import os
import sys
cpptests = ['test_initializers',
'test_maps'
]
pytests = ['core.py',
'valkyrie_com.py',
'valkyrie_collision_check_fcl_default.py',
'valkyrie_collision_check_fcl_latest.py'
]
for test in cpptests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if process.wait()!=0:
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
for test in pytests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if output[-1][0:11]!='>>SUCCESS<<':
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
## Instruction:
Add collision_scene_distances to set of tests to run
## Code After:
import subprocess
import os
import sys
cpptests = ['test_initializers',
'test_maps'
]
pytests = ['core.py',
'valkyrie_com.py',
'valkyrie_collision_check_fcl_default.py',
'valkyrie_collision_check_fcl_latest.py',
'collision_scene_distances.py'
]
for test in cpptests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if process.wait()!=0:
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
for test in pytests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if output[-1][0:11]!='>>SUCCESS<<':
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
|
import subprocess
import os
import sys
cpptests = ['test_initializers',
'test_maps'
]
pytests = ['core.py',
'valkyrie_com.py',
'valkyrie_collision_check_fcl_default.py',
- 'valkyrie_collision_check_fcl_latest.py'
+ 'valkyrie_collision_check_fcl_latest.py',
? +
+ 'collision_scene_distances.py'
]
for test in cpptests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if process.wait()!=0:
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
for test in pytests:
process=subprocess.Popen(['rosrun', 'exotica_examples', test],stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = process.stdout.readlines()
print(''.join(output))
if output[-1][0:11]!='>>SUCCESS<<':
print('Test '+test+' failed\n'+process.stderr.read())
os._exit(1)
|
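The runner applies two different pass criteria: exit status for the C++ binaries and a >>SUCCESS<< sentinel on the last output line for the Python scripts. A condensed, runnable version of both checks, using the current interpreter as a stand-in command; communicate() also avoids the deadlock risk of reading stdout before wait():

import subprocess
import sys

def run_and_check(cmd, sentinel=None):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if sentinel is None:
        ok = proc.returncode == 0                            # C++-style check
    else:
        lines = out.decode().splitlines()
        ok = bool(lines) and lines[-1].startswith(sentinel)  # sentinel check
    if not ok:
        sys.exit('Test %s failed\n%s' % (cmd, err.decode()))

run_and_check([sys.executable, '-c', 'print(">>SUCCESS<<")'], sentinel='>>SUCCESS<<')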
3dfa781ce8e073f40eda3d80794ad1caff5d5920
|
samples/migrateAccount.py
|
samples/migrateAccount.py
|
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
# for migrating a single account...
myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
# for migrating a batch of accounts...
myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
# un-comment one of the lines below, depending on which workflow you wish to use
### for migrating a single account...
# myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
### for migrating multiple accounts...
# myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
Enhance comments in Migrate Account sample
|
Enhance comments in Migrate Account sample
|
Python
|
apache-2.0
|
oevans/ago-tools
|
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
+ # un-comment one of the lines below, depending on which workflow you wish to use
- # for migrating a single account...
- myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
- # for migrating a batch of accounts...
+ ### for migrating a single account...
+ # myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
+
+ ### for migrating multiple accounts...
- myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
+ # myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
Enhance comments in Migrate Account sample
|
## Code Before:
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
# for migrating a single account...
myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
# for migrating a batch of accounts...
myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
## Instruction:
Enhance comments in Migrate Account sample
## Code After:
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
# un-comment one of the lines below, depending on which workflow you wish to use
### for migrating a single account...
# myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
### for migrating multiple accounts...
# myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
+ # un-comment one of the lines below, depending on which workflow you wish to use
- # for migrating a single account...
- myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
- # for migrating a batch of accounts...
? ^^^^^^^^ -
+ ### for migrating a single account...
? ++ ^^^^^^
+ # myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
+
+ ### for migrating multiple accounts...
- myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
+ # myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
? ++
|
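migrateAccounts expects a two-column CSV mapping each source account (col1) to its target (col2). A sketch of producing such a file; the account names are placeholders:

import csv

mapping = [('old_analyst', 'new_analyst'),   # userFrom, userTo
           ('old_editor', 'new_editor')]

with open('user_mapping.csv', 'w', newline='') as f:
    csv.writer(f).writerows(mapping)

# myAgol.migrateAccounts(myAgol, 'user_mapping.csv')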
ff9a8cb1f68785cc16c99fe26dd96e9fa01c325e
|
src/hunter/const.py
|
src/hunter/const.py
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
Add checks in case site.py is broken (e.g. virtualenv).
|
Add checks in case site.py is broken (e.g. virtualenv).
|
Python
|
bsd-2-clause
|
ionelmc/python-hunter
|
import site
import sys
from distutils.sysconfig import get_python_lib
+ SITE_PACKAGES_PATHS = set()
+ if hasattr(site, 'getsitepackages'):
- SITE_PACKAGES_PATHS = set(site.getsitepackages())
+ SITE_PACKAGES_PATHS.update(site.getsitepackages())
+ if hasattr(site, 'getusersitepackages'):
- SITE_PACKAGES_PATHS.add(site.getusersitepackages())
+ SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
Add checks in case site.py is broken (e.g. virtualenv).
|
## Code Before:
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set(site.getsitepackages())
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
## Instruction:
Add checks in case site.py is broken (e.g. virtualenv).
## Code After:
import site
import sys
from distutils.sysconfig import get_python_lib
SITE_PACKAGES_PATHS = set()
if hasattr(site, 'getsitepackages'):
SITE_PACKAGES_PATHS.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
SITE_PACKAGES_PATHS.add(site.getusersitepackages())
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
import site
import sys
from distutils.sysconfig import get_python_lib
+ SITE_PACKAGES_PATHS = set()
+ if hasattr(site, 'getsitepackages'):
- SITE_PACKAGES_PATHS = set(site.getsitepackages())
? ^^^^ -
+ SITE_PACKAGES_PATHS.update(site.getsitepackages())
? ++++ ^^^^^^
+ if hasattr(site, 'getusersitepackages'):
- SITE_PACKAGES_PATHS.add(site.getusersitepackages())
+ SITE_PACKAGES_PATHS.add(site.getusersitepackages())
? ++++
SITE_PACKAGES_PATHS.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(SITE_PACKAGES_PATHS)
SYS_PREFIX_PATHS = set((
sys.prefix,
sys.exec_prefix
))
for prop in 'real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix':
if hasattr(sys, prop):
SYS_PREFIX_PATHS.add(getattr(sys, prop))
SYS_PREFIX_PATHS = tuple(SYS_PREFIX_PATHS)
|
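Classic virtualenv ships a trimmed site.py without getsitepackages, so the unguarded call raises AttributeError there; hasattr keeps the module importable everywhere. A sketch of the guarded collection plus one typical consumer of the resulting tuple (the helper function is illustrative, not part of the library):

import site
from distutils.sysconfig import get_python_lib

paths = set()
if hasattr(site, 'getsitepackages'):        # missing under classic virtualenv
    paths.update(site.getsitepackages())
paths.add(get_python_lib())
paths = tuple(paths)

def in_site_packages(filename):
    # e.g. deciding whether a traced frame belongs to library code
    return filename.startswith(paths)

print(in_site_packages(paths[0] + '/requests/api.py'))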
6a3fbb7280c1078b574736eae3c6a3e4e42d3f46
|
seaborn/__init__.py
|
seaborn/__init__.py
|
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
Remove top-level import of timeseries module
|
Remove top-level import of timeseries module
|
Python
|
bsd-3-clause
|
arokem/seaborn,mwaskom/seaborn,mwaskom/seaborn,arokem/seaborn,anntzer/seaborn,anntzer/seaborn
|
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
- from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
Remove top-level import of timeseries module
|
## Code Before:
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
## Instruction:
Remove top-level import of timeseries module
## Code After:
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
import matplotlib as mpl
_orig_rc_params = mpl.rcParams.copy()
# Import seaborn objects
from .rcmod import *
from .utils import *
from .palettes import *
from .relational import *
from .regression import *
from .categorical import *
from .distributions import *
- from .timeseries import *
from .matrix import *
from .miscplot import *
from .axisgrid import *
from .widgets import *
from .colors import xkcd_rgb, crayons
from . import cm
__version__ = "0.9.1.dev0"
|
010de29acb284250667b393f9e1ba7b34b53aaf5
|
pygametemplate/__init__.py
|
pygametemplate/__init__.py
|
"""pygametemplate module for making creating games with Pygame easier."""
from __future__ import absolute_import
__version__ = "0.2.0"
__author__ = "Andrew Dean"
from pygametemplate.game import Game
|
"""pygametemplate module for making creating games with Pygame easier."""
from __future__ import absolute_import
__version__ = "0.2.0"
__author__ = "Andrew Dean"
from pygametemplate.game import Game
from pygametemplate.view import View
|
Add View as a first class member of pygametemplate
|
Add View as a first class member of pygametemplate
|
Python
|
mit
|
AndyDeany/pygame-template
|
"""pygametemplate module for making creating games with Pygame easier."""
from __future__ import absolute_import
__version__ = "0.2.0"
__author__ = "Andrew Dean"
from pygametemplate.game import Game
+ from pygametemplate.view import View
|
Add View as a first class member of pygametemplate
|
## Code Before:
"""pygametemplate module for making creating games with Pygame easier."""
from __future__ import absolute_import
__version__ = "0.2.0"
__author__ = "Andrew Dean"
from pygametemplate.game import Game
## Instruction:
Add View as a first class member of pygametemplate
## Code After:
"""pygametemplate module for making creating games with Pygame easier."""
from __future__ import absolute_import
__version__ = "0.2.0"
__author__ = "Andrew Dean"
from pygametemplate.game import Game
from pygametemplate.view import View
|
"""pygametemplate module for making creating games with Pygame easier."""
from __future__ import absolute_import
__version__ = "0.2.0"
__author__ = "Andrew Dean"
from pygametemplate.game import Game
+ from pygametemplate.view import View
|
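The practical effect of the re-export is a shorter import path; after the commit both names resolve from the package root (illustrative, assuming the package is installed):

# before: from pygametemplate.view import View
from pygametemplate import Game, View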
2a550df5d9200deb6700fca4270526633811d592
|
osfclient/cli.py
|
osfclient/cli.py
|
"""Command line interface to the OSF"""
import os
from .api import OSF
CHUNK_SIZE = int(5e6)
def _setup_osf(args):
# command line argument overrides environment variable
username = os.getenv("OSF_USERNAME")
if args.username is not None:
username = args.username
password = os.getenv("OSF_PASSWORD")
return OSF(username=username, password=password)
def fetch(args):
osf = _setup_osf(args)
project = osf.project(args.project)
output_dir = args.project
if args.output is not None:
output_dir = args.output
for store in project.storages:
prefix = os.path.join(output_dir, store.name)
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
path = os.path.join(prefix, path)
directory, _ = os.path.split(path)
os.makedirs(directory, exist_ok=True)
with open(path, "wb") as f:
file_.write_to(f)
def list_(args):
osf = _setup_osf(args)
project = osf.project(args.project)
for store in project.storages:
prefix = store.name
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
print(os.path.join(prefix, path))
|
"""Command line interface to the OSF"""
import os
from .api import OSF
CHUNK_SIZE = int(5e6)
def _setup_osf(args):
# command line argument overrides environment variable
username = os.getenv("OSF_USERNAME")
if args.username is not None:
username = args.username
password = None
if username is not None:
password = os.getenv("OSF_PASSWORD")
return OSF(username=username, password=password)
def fetch(args):
osf = _setup_osf(args)
project = osf.project(args.project)
output_dir = args.project
if args.output is not None:
output_dir = args.output
for store in project.storages:
prefix = os.path.join(output_dir, store.name)
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
path = os.path.join(prefix, path)
directory, _ = os.path.split(path)
os.makedirs(directory, exist_ok=True)
with open(path, "wb") as f:
file_.write_to(f)
def list_(args):
osf = _setup_osf(args)
project = osf.project(args.project)
for store in project.storages:
prefix = store.name
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
print(os.path.join(prefix, path))
|
Stop grabbing password when there is no username
|
Stop grabbing password when there is no username
|
Python
|
bsd-3-clause
|
betatim/osf-cli,betatim/osf-cli
|
"""Command line interface to the OSF"""
import os
from .api import OSF
CHUNK_SIZE = int(5e6)
def _setup_osf(args):
# command line argument overrides environment variable
username = os.getenv("OSF_USERNAME")
if args.username is not None:
username = args.username
+ password = None
+ if username is not None:
- password = os.getenv("OSF_PASSWORD")
+ password = os.getenv("OSF_PASSWORD")
return OSF(username=username, password=password)
def fetch(args):
osf = _setup_osf(args)
project = osf.project(args.project)
output_dir = args.project
if args.output is not None:
output_dir = args.output
for store in project.storages:
prefix = os.path.join(output_dir, store.name)
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
path = os.path.join(prefix, path)
directory, _ = os.path.split(path)
os.makedirs(directory, exist_ok=True)
with open(path, "wb") as f:
file_.write_to(f)
def list_(args):
osf = _setup_osf(args)
project = osf.project(args.project)
for store in project.storages:
prefix = store.name
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
print(os.path.join(prefix, path))
|
Stop grabbing password when there is no username
|
## Code Before:
"""Command line interface to the OSF"""
import os
from .api import OSF
CHUNK_SIZE = int(5e6)
def _setup_osf(args):
# command line argument overrides environment variable
username = os.getenv("OSF_USERNAME")
if args.username is not None:
username = args.username
password = os.getenv("OSF_PASSWORD")
return OSF(username=username, password=password)
def fetch(args):
osf = _setup_osf(args)
project = osf.project(args.project)
output_dir = args.project
if args.output is not None:
output_dir = args.output
for store in project.storages:
prefix = os.path.join(output_dir, store.name)
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
path = os.path.join(prefix, path)
directory, _ = os.path.split(path)
os.makedirs(directory, exist_ok=True)
with open(path, "wb") as f:
file_.write_to(f)
def list_(args):
osf = _setup_osf(args)
project = osf.project(args.project)
for store in project.storages:
prefix = store.name
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
print(os.path.join(prefix, path))
## Instruction:
Stop grabbing password when there is no username
## Code After:
"""Command line interface to the OSF"""
import os
from .api import OSF
CHUNK_SIZE = int(5e6)
def _setup_osf(args):
# command line argument overrides environment variable
username = os.getenv("OSF_USERNAME")
if args.username is not None:
username = args.username
password = None
if username is not None:
password = os.getenv("OSF_PASSWORD")
return OSF(username=username, password=password)
def fetch(args):
osf = _setup_osf(args)
project = osf.project(args.project)
output_dir = args.project
if args.output is not None:
output_dir = args.output
for store in project.storages:
prefix = os.path.join(output_dir, store.name)
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
path = os.path.join(prefix, path)
directory, _ = os.path.split(path)
os.makedirs(directory, exist_ok=True)
with open(path, "wb") as f:
file_.write_to(f)
def list_(args):
osf = _setup_osf(args)
project = osf.project(args.project)
for store in project.storages:
prefix = store.name
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
print(os.path.join(prefix, path))
|
"""Command line interface to the OSF"""
import os
from .api import OSF
CHUNK_SIZE = int(5e6)
def _setup_osf(args):
# command line argument overrides environment variable
username = os.getenv("OSF_USERNAME")
if args.username is not None:
username = args.username
+ password = None
+ if username is not None:
- password = os.getenv("OSF_PASSWORD")
+ password = os.getenv("OSF_PASSWORD")
? ++++
return OSF(username=username, password=password)
def fetch(args):
osf = _setup_osf(args)
project = osf.project(args.project)
output_dir = args.project
if args.output is not None:
output_dir = args.output
for store in project.storages:
prefix = os.path.join(output_dir, store.name)
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
path = os.path.join(prefix, path)
directory, _ = os.path.split(path)
os.makedirs(directory, exist_ok=True)
with open(path, "wb") as f:
file_.write_to(f)
def list_(args):
osf = _setup_osf(args)
project = osf.project(args.project)
for store in project.storages:
prefix = store.name
for file_ in store.files:
path = file_.path
if path.startswith('/'):
path = path[1:]
print(os.path.join(prefix, path))
|
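In isolation, the guard ensures an anonymous invocation (no username from flag or environment) never picks up a stray OSF_PASSWORD. A minimal reproduction of the resolution logic:

import os

def resolve_credentials(cli_username=None):
    username = os.getenv('OSF_USERNAME')
    if cli_username is not None:
        username = cli_username
    password = None
    if username is not None:            # password only matters with a user
        password = os.getenv('OSF_PASSWORD')
    return username, password

os.environ['OSF_PASSWORD'] = 'leftover-secret'
os.environ.pop('OSF_USERNAME', None)
assert resolve_credentials() == (None, None)
assert resolve_credentials('alice') == ('alice', 'leftover-secret')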
a6fe31d7f687df6934143fd2dda1cd323f3d31fb
|
uvloop/_patch.py
|
uvloop/_patch.py
|
import asyncio
from asyncio import coroutines
def _format_coroutine(coro):
if asyncio.iscoroutine(coro) and not hasattr(coro, 'cr_code'):
# Most likely a Cython coroutine
coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)
if coro.cr_running:
return '{} running'.format(coro_name)
else:
return coro_name
return _old_format_coroutine(coro)
_old_format_coroutine = coroutines._format_coroutine
coroutines._format_coroutine = _format_coroutine
|
import asyncio
from asyncio import coroutines
def _format_coroutine(coro):
if asyncio.iscoroutine(coro) \
and not hasattr(coro, 'cr_code') \
and not hasattr(coro, 'gi_code'):
# Most likely a Cython coroutine
coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)
running = False
try:
running = coro.cr_running
except AttributeError:
try:
running = coro.gi_running
except AttributeError:
pass
if running:
return '{} running'.format(coro_name)
else:
return coro_name
return _old_format_coroutine(coro)
_old_format_coroutine = coroutines._format_coroutine
coroutines._format_coroutine = _format_coroutine
|
Fix patched _format_coroutine to support Cython generators
|
Fix patched _format_coroutine to support Cython generators
|
Python
|
apache-2.0
|
1st1/uvloop,MagicStack/uvloop,MagicStack/uvloop
|
import asyncio
from asyncio import coroutines
def _format_coroutine(coro):
- if asyncio.iscoroutine(coro) and not hasattr(coro, 'cr_code'):
+ if asyncio.iscoroutine(coro) \
+ and not hasattr(coro, 'cr_code') \
+ and not hasattr(coro, 'gi_code'):
+
# Most likely a Cython coroutine
coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)
+
+ running = False
+ try:
+ running = coro.cr_running
+ except AttributeError:
+ try:
+ running = coro.gi_running
+ except AttributeError:
+ pass
+
- if coro.cr_running:
+ if running:
return '{} running'.format(coro_name)
else:
return coro_name
return _old_format_coroutine(coro)
_old_format_coroutine = coroutines._format_coroutine
coroutines._format_coroutine = _format_coroutine
|
Fix patched _format_coroutine to support Cython generators
|
## Code Before:
import asyncio
from asyncio import coroutines
def _format_coroutine(coro):
if asyncio.iscoroutine(coro) and not hasattr(coro, 'cr_code'):
# Most likely a Cython coroutine
coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)
if coro.cr_running:
return '{} running'.format(coro_name)
else:
return coro_name
return _old_format_coroutine(coro)
_old_format_coroutine = coroutines._format_coroutine
coroutines._format_coroutine = _format_coroutine
## Instruction:
Fix patched _format_coroutine to support Cython generators
## Code After:
import asyncio
from asyncio import coroutines
def _format_coroutine(coro):
if asyncio.iscoroutine(coro) \
and not hasattr(coro, 'cr_code') \
and not hasattr(coro, 'gi_code'):
# Most likely a Cython coroutine
coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)
running = False
try:
running = coro.cr_running
except AttributeError:
try:
running = coro.gi_running
except AttributeError:
pass
if running:
return '{} running'.format(coro_name)
else:
return coro_name
return _old_format_coroutine(coro)
_old_format_coroutine = coroutines._format_coroutine
coroutines._format_coroutine = _format_coroutine
|
import asyncio
from asyncio import coroutines
def _format_coroutine(coro):
- if asyncio.iscoroutine(coro) and not hasattr(coro, 'cr_code'):
+ if asyncio.iscoroutine(coro) \
+ and not hasattr(coro, 'cr_code') \
+ and not hasattr(coro, 'gi_code'):
+
# Most likely a Cython coroutine
coro_name = '{}()'.format(coro.__qualname__ or coro.__name__)
+
+ running = False
+ try:
+ running = coro.cr_running
+ except AttributeError:
+ try:
+ running = coro.gi_running
+ except AttributeError:
+ pass
+
- if coro.cr_running:
? --------
+ if running:
return '{} running'.format(coro_name)
else:
return coro_name
return _old_format_coroutine(coro)
_old_format_coroutine = coroutines._format_coroutine
coroutines._format_coroutine = _format_coroutine
|
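A self-contained sketch of the attribute-fallback logic this patch adds, exercised with a plain object rather than a real Cython coroutine; the helper and class names are illustrative.

def running_flag(coro):
    # Prefer the native-coroutine flag, fall back to the generator flag,
    # and default to False when neither attribute exists.
    running = False
    try:
        running = coro.cr_running
    except AttributeError:
        try:
            running = coro.gi_running
        except AttributeError:
            pass
    return running

class FakeCythonGen:
    gi_running = True  # mimics a Cython generator: gi_* attributes, no cr_*

print(running_flag(FakeCythonGen()))  # True
print(running_flag(object()))         # False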
85faea2a9185924d1255e84aad1489f7e3627d13
|
django_lightweight_queue/utils.py
|
django_lightweight_queue/utils.py
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
Add setproctitle wrapper so it's optional.
|
Add setproctitle wrapper so it's optional.
Signed-off-by: Chris Lamb <[email protected]>
|
Python
|
bsd-3-clause
|
prophile/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue,thread/django-lightweight-queue
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
+ try:
+ from setproctitle import setproctitle
+ except ImportError:
+ def setproctitle(title):
+ pass
+
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
Add setproctitle wrapper so it's optional.
|
## Code Before:
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
## Instruction:
Add setproctitle wrapper so it's optional.
## Code After:
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
from django.db import models
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import MiddlewareNotUsed
from django.utils.functional import memoize
from django.utils.module_loading import module_has_submodule
from . import app_settings
def get_path(path):
module_name, attr = path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, attr)
def get_backend():
return get_path(app_settings.BACKEND)()
def get_middleware():
middleware = []
for path in app_settings.MIDDLEWARE:
try:
middleware.append(get_path(path)())
except MiddlewareNotUsed:
pass
return middleware
+ try:
+ from setproctitle import setproctitle
+ except ImportError:
+ def setproctitle(title):
+ pass
+
get_path = memoize(get_path, {}, 1)
get_backend = memoize(get_backend, {}, 0)
get_middleware = memoize(get_middleware, {}, 0)
|
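The optional-dependency pattern this commit introduces, reduced to a runnable sketch; setproctitle is the real PyPI package, the process title is illustrative.

try:
    from setproctitle import setproctitle
except ImportError:
    # Degrade to a no-op when the optional dependency is missing.
    def setproctitle(title):
        pass

setproctitle("my-worker")  # renames the process if the package is installed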
802bed896c147fc6bb6dc72f62a80236bc3cd263
|
soccermetrics/rest/resources/personnel.py
|
soccermetrics/rest/resources/personnel.py
|
from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource
|
from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource
|
Remove stray non-ASCII character in docstring
|
Remove stray non-ASCII character in docstring
|
Python
|
mit
|
soccermetrics/soccermetrics-client-py
|
from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
- data on all personnel involved in a football match – players,
+ data on the following personnel involved in a football match:
- managers, and match referees.
+
+ * Players,
+ * Managers,
+ * Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource
|
Remove stray non-ASCII character in docstring
|
## Code Before:
from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource
## Instruction:
Remove stray non-ASCII character in docstring
## Code After:
from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource
|
from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
- data on all personnel involved in a football match – players,
? ^ ^^^^^^^^^^^
+ data on the following personnel involved in a football match:
? ^^^^^^ +++++ ^
- managers, and match referees.
+
+ * Players,
+ * Managers,
+ * Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource
|
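A quick way to locate stray non-ASCII characters such as the en dash removed here, using only the standard library; the sample string echoes the old docstring.

def find_non_ascii(text):
    # Report (index, character, codepoint) for everything outside ASCII.
    return [(i, ch, hex(ord(ch))) for i, ch in enumerate(text) if ord(ch) > 127]

docstring = "data on all personnel involved in a football match \u2013 players,"
print(find_non_ascii(docstring))  # reports the en dash and its offset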
9633f3ee1a3431cb373a4652afbfc2cd8b3b4c23
|
test_utils/anki/__init__.py
|
test_utils/anki/__init__.py
|
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self):
self.shadowed_modules = {}
for module in self.modules_list:
self.shadowed_modules[module] = sys.modules.get(module)
sys.modules[module] = MagicMock()
def unmock(self):
for module in self.modules_list:
shadowed_module = self.shadowed_modules[module]
if shadowed_module is not None:
sys.modules[module] = shadowed_module
else:
if module in sys.modules:
del sys.modules[module]
|
from typing import List
from typing import Optional
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self, module_names_list: Optional[List[str]] = None):
if module_names_list is None:
module_names_list = self.module_names_list
self.shadowed_modules = {}
for module_name in module_names_list:
self.shadowed_modules[module_name] = sys.modules.get(module_name)
sys.modules[module_name] = MagicMock()
def unmock(self):
for module_name, module in self.shadowed_modules.items():
if module is not None:
sys.modules[module_name] = module
else:
if module_name in sys.modules:
del sys.modules[module_name]
|
Allow specifying modules to be mocked
|
Allow specifying modules to be mocked
|
Python
|
mit
|
Stvad/CrowdAnki,Stvad/CrowdAnki,Stvad/CrowdAnki
|
+ from typing import List
+ from typing import Optional
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
- modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
+ module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
- 'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
+ 'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
- def __init__(self):
+ def __init__(self, module_names_list: Optional[List[str]] = None):
+ if module_names_list is None:
+ module_names_list = self.module_names_list
+
self.shadowed_modules = {}
- for module in self.modules_list:
+ for module_name in module_names_list:
- self.shadowed_modules[module] = sys.modules.get(module)
+ self.shadowed_modules[module_name] = sys.modules.get(module_name)
- sys.modules[module] = MagicMock()
+ sys.modules[module_name] = MagicMock()
def unmock(self):
+ for module_name, module in self.shadowed_modules.items():
- for module in self.modules_list:
- shadowed_module = self.shadowed_modules[module]
- if shadowed_module is not None:
+ if module is not None:
- sys.modules[module] = shadowed_module
+ sys.modules[module_name] = module
else:
- if module in sys.modules:
+ if module_name in sys.modules:
- del sys.modules[module]
+ del sys.modules[module_name]
|
Allow specifying modules to be mocked
|
## Code Before:
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self):
self.shadowed_modules = {}
for module in self.modules_list:
self.shadowed_modules[module] = sys.modules.get(module)
sys.modules[module] = MagicMock()
def unmock(self):
for module in self.modules_list:
shadowed_module = self.shadowed_modules[module]
if shadowed_module is not None:
sys.modules[module] = shadowed_module
else:
if module in sys.modules:
del sys.modules[module]
## Instruction:
Allow specifying modules to be mocked
## Code After:
from typing import List
from typing import Optional
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self, module_names_list: Optional[List[str]] = None):
if module_names_list is None:
module_names_list = self.module_names_list
self.shadowed_modules = {}
for module_name in module_names_list:
self.shadowed_modules[module_name] = sys.modules.get(module_name)
sys.modules[module_name] = MagicMock()
def unmock(self):
for module_name, module in self.shadowed_modules.items():
if module is not None:
sys.modules[module_name] = module
else:
if module_name in sys.modules:
del sys.modules[module_name]
|
+ from typing import List
+ from typing import Optional
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
- modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
+ module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
? +++++
- 'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
+ 'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
? +++++
- def __init__(self):
+ def __init__(self, module_names_list: Optional[List[str]] = None):
+ if module_names_list is None:
+ module_names_list = self.module_names_list
+
self.shadowed_modules = {}
- for module in self.modules_list:
? -----
+ for module_name in module_names_list:
? +++++ +++++
- self.shadowed_modules[module] = sys.modules.get(module)
+ self.shadowed_modules[module_name] = sys.modules.get(module_name)
? +++++ +++++
- sys.modules[module] = MagicMock()
+ sys.modules[module_name] = MagicMock()
? +++++
def unmock(self):
+ for module_name, module in self.shadowed_modules.items():
- for module in self.modules_list:
- shadowed_module = self.shadowed_modules[module]
- if shadowed_module is not None:
? ---------
+ if module is not None:
- sys.modules[module] = shadowed_module
? ---------
+ sys.modules[module_name] = module
? +++++
else:
- if module in sys.modules:
+ if module_name in sys.modules:
? +++++
- del sys.modules[module]
+ del sys.modules[module_name]
? +++++
|
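The underlying technique, shadowing entries in sys.modules with MagicMock and restoring them afterwards, as a self-contained sketch; the module names are borrowed from the record, the hook name is an assumption.

import sys
from unittest.mock import MagicMock

shadowed = {name: sys.modules.get(name) for name in ["anki", "anki.hooks"]}
for name in shadowed:
    sys.modules[name] = MagicMock()  # imports of these names now succeed

import anki.hooks                                  # resolves to the mock
anki.hooks.addHook("profileLoaded", lambda: None)  # any attribute works

for name, module in shadowed.items():  # restore the previous state
    if module is not None:
        sys.modules[name] = module
    elif name in sys.modules:
        del sys.modules[name]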
6b3f568a6615e9439fc0df0eac68838b6cbda0d9
|
anti-XSS.py
|
anti-XSS.py
|
'''
Copyright (c) 2016 anti-XSS developers
'''
import sys
from lib.core.link import Link
from optparse import OptionParser
from lib.core.engine import getPage
from lib.core.engine import getScript
from lib.core.engine import xssScanner
from lib.generator.report import gnrReport
def main():
parser = OptionParser()
parser.add_option('-u', '--url', dest='startUrl', help='Target URL (e.g. \'http://www.site.com/\')')
parser.add_option('-d', '--depth', dest='depth', help='The depth you want to scan (default: 2)')
(options, args) = parser.parse_args()
if options.startUrl:
rootLink = Link(options.startUrl, options.startUrl)
if options.depth:
getPage(rootLink, int(options.depth))
else:
getPage(rootLink, 2)
getScript()
xssScanner()
pass
if __name__ == '__main__':
main()
|
'''
Copyright (c) 2016 anti-XSS developers
'''
import sys
from lib.core.urlfun import *
from lib.core.link import Link
from optparse import OptionParser
from lib.core.engine import getPage
from lib.core.engine import getScript
from lib.core.engine import xssScanner
from lib.generator.report import gnrReport
def main():
parser = OptionParser()
parser.add_option('-u', '--url', dest='startUrl', help='Target URL (e.g. \'http://www.site.com/\')')
parser.add_option('-d', '--depth', dest='depth', help='The depth you want to scan (default: 2)')
(options, args) = parser.parse_args()
if options.startUrl:
url = initialize(options.startUrl)
rootLink = Link(url, url)
if options.depth:
getPage(rootLink, int(options.depth))
else:
getPage(rootLink, 2)
getScript()
xssScanner()
pass
if __name__ == '__main__':
main()
|
Add initialization before getting the URL
|
Add initialization before getting the URL
|
Python
|
mit
|
lewangbtcc/anti-XSS,lewangbtcc/anti-XSS
|
'''
Copyright (c) 2016 anti-XSS developers
'''
import sys
+
+ from lib.core.urlfun import *
from lib.core.link import Link
from optparse import OptionParser
from lib.core.engine import getPage
from lib.core.engine import getScript
from lib.core.engine import xssScanner
from lib.generator.report import gnrReport
def main():
parser = OptionParser()
parser.add_option('-u', '--url', dest='startUrl', help='Target URL (e.g. \'http://www.site.com/\')')
parser.add_option('-d', '--depth', dest='depth', help='The depth you want to scan (default: 2)')
(options, args) = parser.parse_args()
if options.startUrl:
- rootLink = Link(options.startUrl, options.startUrl)
+ url = initialize(options.startUrl)
+ rootLink = Link(url, url)
if options.depth:
getPage(rootLink, int(options.depth))
else:
getPage(rootLink, 2)
getScript()
xssScanner()
pass
if __name__ == '__main__':
main()
|
Add initialization before getting the URL
|
## Code Before:
'''
Copyright (c) 2016 anti-XSS developers
'''
import sys
from lib.core.link import Link
from optparse import OptionParser
from lib.core.engine import getPage
from lib.core.engine import getScript
from lib.core.engine import xssScanner
from lib.generator.report import gnrReport
def main():
parser = OptionParser()
parser.add_option('-u', '--url', dest='startUrl', help='Target URL (e.g. \'http://www.site.com/\')')
parser.add_option('-d', '--depth', dest='depth', help='The depth you want to scan (default: 2)')
(options, args) = parser.parse_args()
if options.startUrl:
rootLink = Link(options.startUrl, options.startUrl)
if options.depth:
getPage(rootLink, int(options.depth))
else:
getPage(rootLink, 2)
getScript()
xssScanner()
pass
if __name__ == '__main__':
main()
## Instruction:
Add initialization before getting the URL
## Code After:
'''
Copyright (c) 2016 anti-XSS developers
'''
import sys
from lib.core.urlfun import *
from lib.core.link import Link
from optparse import OptionParser
from lib.core.engine import getPage
from lib.core.engine import getScript
from lib.core.engine import xssScanner
from lib.generator.report import gnrReport
def main():
parser = OptionParser()
parser.add_option('-u', '--url', dest='startUrl', help='Target URL (e.g. \'http://www.site.com/\')')
parser.add_option('-d', '--depth', dest='depth', help='The depth you want to scan (default: 2)')
(options, args) = parser.parse_args()
if options.startUrl:
url = initialize(options.startUrl)
rootLink = Link(url, url)
if options.depth:
getPage(rootLink, int(options.depth))
else:
getPage(rootLink, 2)
getScript()
xssScanner()
pass
if __name__ == '__main__':
main()
|
'''
Copyright (c) 2016 anti-XSS developers
'''
import sys
+
+ from lib.core.urlfun import *
from lib.core.link import Link
from optparse import OptionParser
from lib.core.engine import getPage
from lib.core.engine import getScript
from lib.core.engine import xssScanner
from lib.generator.report import gnrReport
def main():
parser = OptionParser()
parser.add_option('-u', '--url', dest='startUrl', help='Target URL (e.g. \'http://www.site.com/\')')
parser.add_option('-d', '--depth', dest='depth', help='The depth you want to scan (default: 2)')
(options, args) = parser.parse_args()
if options.startUrl:
- rootLink = Link(options.startUrl, options.startUrl)
+ url = initialize(options.startUrl)
+ rootLink = Link(url, url)
if options.depth:
getPage(rootLink, int(options.depth))
else:
getPage(rootLink, 2)
getScript()
xssScanner()
pass
if __name__ == '__main__':
main()
|
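The record does not show lib.core.urlfun, so this is a hypothetical sketch of what an initialize() for a start URL typically does, namely adding a missing scheme and trailing slash; the rules and names are assumptions, not the project's actual code.

from urllib.parse import urlparse

def initialize(raw_url):
    # Assume http:// when the user omits the scheme (an assumption).
    url = raw_url if "://" in raw_url else "http://" + raw_url
    # Ensure a path component so later URL joins behave predictably.
    if not urlparse(url).path:
        url += "/"
    return url

print(initialize("www.site.com"))       # http://www.site.com/
print(initialize("https://site.com/"))  # unchanged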
6432d92533953c2873b315945254e5260a109106
|
cs251tk/student/markdownify/check_submit_date.py
|
cs251tk/student/markdownify/check_submit_date.py
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
Modify way to find earliest date
|
Modify way to find earliest date
|
Python
|
mit
|
StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
- # Sort dates earliest to latest
- dates.sort()
-
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
- return dates[0].strftime("%x %X")
+ return min(dates).strftime("%x %X")
|
Modify way to find earliest date
|
## Code Before:
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
## Instruction:
Modify way to find earliest date
## Code After:
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
- # Sort dates earliest to latest
- dates.sort()
-
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
- return dates[0].strftime("%x %X")
? ^^^
+ return min(dates).strftime("%x %X")
? ++++ ^
|
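A tiny demonstration of why min() can replace the sort-then-index idiom: same result, a single O(n) pass, and no extra ordering work.

from datetime import datetime

dates = [datetime(2016, 9, 14), datetime(2016, 9, 2), datetime(2016, 9, 30)]

# The old approach ordered the list and took the first element.
assert sorted(dates)[0] == min(dates)

print(min(dates).strftime("%x %X"))  # e.g. 09/02/16 00:00:00 (locale-dependent)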
2306478f67a93e27dd9d7d397f97e3641df3516a
|
ipython_startup.py
|
ipython_startup.py
|
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
|
from __future__ import division
from __future__ import absolute_import
import scipy as sp
import itertools as it
import functools as ft
import operator as op
import sys
import sympy
# Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.pyplot import subplots
from matplotlib.pyplot import show as pltshow
# and import some common functions into the global namespace
from scipy.linalg import norm
from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
from math import atan2, acos
from sympy import Rational as sRat
from sympy import pretty as spretty
|
Add lots of useful default imports to ipython
|
Add lots of useful default imports to ipython
|
Python
|
cc0-1.0
|
davidshepherd7/dotfiles,davidshepherd7/dotfiles,davidshepherd7/dotfiles,davidshepherd7/dotfiles,davidshepherd7/dotfiles
|
+ from __future__ import division
+ from __future__ import absolute_import
+
import scipy as sp
+ import itertools as it
+ import functools as ft
+ import operator as op
+ import sys
+ import sympy
+
+
+ # Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
+ from matplotlib.pyplot import subplots
+ from matplotlib.pyplot import show as pltshow
+
+ # and import some common functions into the global namespace
+ from scipy.linalg import norm
+ from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
+ from math import atan2, acos
+ from sympy import Rational as sRat
+ from sympy import pretty as spretty
+
|
Add lots of useful default imports to ipython
|
## Code Before:
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
## Instruction:
Add lots of useful default imports to ipython
## Code After:
from __future__ import division
from __future__ import absolute_import
import scipy as sp
import itertools as it
import functools as ft
import operator as op
import sys
import sympy
# Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.pyplot import subplots
from matplotlib.pyplot import show as pltshow
# and import some common functions into the global namespace
from scipy.linalg import norm
from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
from math import atan2, acos
from sympy import Rational as sRat
from sympy import pretty as spretty
|
+ from __future__ import division
+ from __future__ import absolute_import
+
import scipy as sp
+ import itertools as it
+ import functools as ft
+ import operator as op
+ import sys
+ import sympy
+
+
+ # Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
+ from matplotlib.pyplot import subplots
+ from matplotlib.pyplot import show as pltshow
+
+
+ # and import some common functions into the global namespace
+ from scipy.linalg import norm
+ from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
+ from math import atan2, acos
+ from sympy import Rational as sRat
+ from sympy import pretty as spretty
|
8803f6058255237dff39549426ca6a513a25193c
|
website_product_supplier/__openerp__.py
|
website_product_supplier/__openerp__.py
|
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
Add images key in manifest file
|
[FIX] website_product_supplier: Add images key in manifest file
|
Python
|
agpl-3.0
|
nuobit/website,open-synergy/website,gfcapalbo/website,LasLabs/website,acsone/website,nuobit/website,LasLabs/website,Yajo/website,LasLabs/website,gfcapalbo/website,kaerdsar/website,Yajo/website,nuobit/website,nuobit/website,Yajo/website,gfcapalbo/website,acsone/website,kaerdsar/website,LasLabs/website,open-synergy/website,acsone/website,acsone/website,gfcapalbo/website,open-synergy/website,brain-tec/website,brain-tec/website,open-synergy/website,brain-tec/website,kaerdsar/website,brain-tec/website,Yajo/website
|
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
+ 'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
Add images key in manifest file
|
## Code Before:
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
## Instruction:
Add images key in manifest file
## Code After:
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
{
'name': "Website Product Supplier",
'category': 'Website',
'version': '8.0.1.0.0',
'depends': [
'website_sale',
'website_portal_purchase',
],
'data': [
'security/ir.model.access.csv',
'views/product_supplier_view.xml',
'views/website_portal.xml',
'views/assets.xml',
'security/website_product_supplier_security.xml',
],
+ 'images': [],
'qweb': ['static/src/xml/*.xml'],
'author': 'Antiun Ingeniería S.L., '
'Incaser Informatica S.L., '
'Odoo Community Association (OCA)',
'website': 'http://www.antiun.com',
'license': 'AGPL-3',
'installable': True,
}
|
78031ca1077a224d37c4f549cd8dac55edd4ed5f
|
fragdenstaat_de/urls.py
|
fragdenstaat_de/urls.py
|
from django.conf.urls.defaults import patterns
urlpatterns = patterns('fragdenstaat_de.views',
(r'^presse/(?P<slug>[-\w]+)/$', 'show_press', {}, 'fds-show_press'),
)
|
from django.conf.urls import patterns, url
from django.http import HttpResponseRedirect
urlpatterns = patterns('fragdenstaat_de.views',
(r'^presse/(?P<slug>[-\w]+)/$', 'show_press', {}, 'fds-show_press'),
url(r'^nordrhein-westfalen/', lambda request: HttpResponseRedirect('/nrw/'),
name="jurisdiction-nrw-redirect")
)
|
Add custom NRW redirect url
|
Add custom NRW redirect url
|
Python
|
mit
|
okfse/fragastaten_se,catcosmo/fragdenstaat_de,okfse/fragdenstaat_de,okfse/fragastaten_se,catcosmo/fragdenstaat_de,okfse/fragdenstaat_de
|
- from django.conf.urls.defaults import patterns
+ from django.conf.urls import patterns, url
+ from django.http import HttpResponseRedirect
urlpatterns = patterns('fragdenstaat_de.views',
(r'^presse/(?P<slug>[-\w]+)/$', 'show_press', {}, 'fds-show_press'),
+ url(r'^nordrhein-westfalen/', lambda request: HttpResponseRedirect('/nrw/'),
+ name="jurisdiction-nrw-redirect")
)
|
Add custom NRW redirect url
|
## Code Before:
from django.conf.urls.defaults import patterns
urlpatterns = patterns('fragdenstaat_de.views',
(r'^presse/(?P<slug>[-\w]+)/$', 'show_press', {}, 'fds-show_press'),
)
## Instruction:
Add custom NRW redirect url
## Code After:
from django.conf.urls import patterns, url
from django.http import HttpResponseRedirect
urlpatterns = patterns('fragdenstaat_de.views',
(r'^presse/(?P<slug>[-\w]+)/$', 'show_press', {}, 'fds-show_press'),
url(r'^nordrhein-westfalen/', lambda request: HttpResponseRedirect('/nrw/'),
name="jurisdiction-nrw-redirect")
)
|
- from django.conf.urls.defaults import patterns
? ---------
+ from django.conf.urls import patterns, url
? +++++
+ from django.http import HttpResponseRedirect
urlpatterns = patterns('fragdenstaat_de.views',
(r'^presse/(?P<slug>[-\w]+)/$', 'show_press', {}, 'fds-show_press'),
+ url(r'^nordrhein-westfalen/', lambda request: HttpResponseRedirect('/nrw/'),
+ name="jurisdiction-nrw-redirect")
)
|
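For comparison, Django also ships a class-based way to express the same route; a sketch using RedirectView, which should be equivalent to the lambda under the record's Django version.

from django.conf.urls import url
from django.views.generic import RedirectView

urlpatterns = (
    url(r'^nordrhein-westfalen/',
        RedirectView.as_view(url='/nrw/', permanent=False),  # 302, like HttpResponseRedirect
        name="jurisdiction-nrw-redirect"),
)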
c7cb6c1441bcfe359a9179858492044591e80007
|
osgtest/tests/test_10_condor.py
|
osgtest/tests/test_10_condor.py
|
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor')
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
|
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor', chmod=0o644)
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
|
Make the personal condor config world-readable
|
Make the personal condor config world-readable
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
- files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor')
+ files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor', chmod=0o644)
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
|
Make the personal condor config world-readable
|
## Code Before:
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor')
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
## Instruction:
Make the personal condor config world-readable
## Code After:
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor', chmod=0o644)
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
|
from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
- files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor')
+ files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor', chmod=0o644)
? +++++++++++++
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
|
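What chmod=0o644 amounts to, shown with the standard library alone; the file name and contents are illustrative.

import os
import stat

path = "99-personal-condor.conf"
with open(path, "w") as fh:
    fh.write("CONDOR_HOST = $(FULL_HOSTNAME)\n")

os.chmod(path, 0o644)  # rw for the owner, read-only for group and world

print(oct(stat.S_IMODE(os.stat(path).st_mode)))  # 0o644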
55378f71e8553eaad606433ae9e871983e99bd26
|
pastalog/pastalog/__init__.py
|
pastalog/pastalog/__init__.py
|
'''
The pastalog Log class, which simply sends a POST request to a the server.
'''
import requests
class Log(object):
def __init__(self, url, model_name):
self.url = url
self.model_name = model_name
def post(self, series_name, value, step):
payload = {"modelName": self.model_name,
"pointType": series_name,
"pointValue": value,
"globalStep": step}
r = requests.post(self.url, json=payload)
return r.raise_for_status()
|
'''
The pastalog Log class, which simply sends a POST request to a the server.
'''
import requests
import os
class Log(object):
def __init__(self, url, model_name):
self.url = os.path.join(url, 'data')
self.model_name = model_name
def post(self, series_name, value, step):
payload = {"modelName": self.model_name,
"pointType": series_name,
"pointValue": value,
"globalStep": step}
r = requests.post(self.url, json=payload)
return r.raise_for_status()
|
Update to post to the correct endpoint
|
Update to post to the correct endpoint
|
Python
|
mit
|
rewonc/pastalog,rewonc/pastalog,rewonc/pastalog
|
'''
The pastalog Log class, which simply sends a POST request to a the server.
'''
import requests
+ import os
class Log(object):
def __init__(self, url, model_name):
- self.url = url
+ self.url = os.path.join(url, 'data')
self.model_name = model_name
def post(self, series_name, value, step):
payload = {"modelName": self.model_name,
"pointType": series_name,
"pointValue": value,
"globalStep": step}
r = requests.post(self.url, json=payload)
return r.raise_for_status()
|
Update to post to the correct endpoint
|
## Code Before:
'''
The pastalog Log class, which simply sends a POST request to a the server.
'''
import requests
class Log(object):
def __init__(self, url, model_name):
self.url = url
self.model_name = model_name
def post(self, series_name, value, step):
payload = {"modelName": self.model_name,
"pointType": series_name,
"pointValue": value,
"globalStep": step}
r = requests.post(self.url, json=payload)
return r.raise_for_status()
## Instruction:
Update to post to the correct endpoint
## Code After:
'''
The pastalog Log class, which simply sends a POST request to a the server.
'''
import requests
import os
class Log(object):
def __init__(self, url, model_name):
self.url = os.path.join(url, 'data')
self.model_name = model_name
def post(self, series_name, value, step):
payload = {"modelName": self.model_name,
"pointType": series_name,
"pointValue": value,
"globalStep": step}
r = requests.post(self.url, json=payload)
return r.raise_for_status()
|
'''
The pastalog Log class, which simply sends a POST request to a the server.
'''
import requests
+ import os
class Log(object):
def __init__(self, url, model_name):
- self.url = url
+ self.url = os.path.join(url, 'data')
self.model_name = model_name
def post(self, series_name, value, step):
payload = {"modelName": self.model_name,
"pointType": series_name,
"pointValue": value,
"globalStep": step}
r = requests.post(self.url, json=payload)
return r.raise_for_status()
|
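One design note on the change: os.path.join uses the platform separator, so it would produce a backslash on Windows; for URLs, a posixpath-based join is the portable variant. A sketch under that assumption, with an illustrative base URL.

import posixpath

def data_endpoint(base_url):
    # posixpath always joins with forward slashes, unlike os.path on Windows.
    return posixpath.join(base_url, "data")

print(data_endpoint("http://localhost:8120"))  # http://localhost:8120/data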
f00c7f3a976ba4790963a5701c5ce13f6dcd84fa
|
tests/test_funcmakers.py
|
tests/test_funcmakers.py
|
import inspect
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
Remove unused import from tests
|
Remove unused import from tests
|
Python
|
bsd-3-clause
|
Suor/funcy
|
- import inspect
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
Remove unused import from tests
|
## Code Before:
import inspect
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
## Instruction:
Remove unused import from tests
## Code After:
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
- import inspect
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
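A hedged usage sketch of the coercions these tests pin down, assuming funcy is installed: make_func turns ints into item lookups, slices into slicers, regex strings into searchers, and dicts/sets into lookups, while make_pred coerces the result to a boolean.

from funcy.funcmakers import make_func, make_pred

first = make_func(0)              # int becomes an index/key lookup
assert first('abc') == 'a'

tail = make_func(slice(1, None))  # slice becomes a slicer
assert tail([1, 2, 3]) == [2, 3]

digits = make_func(r'\d+')        # regex string searches and returns the match text
assert digits('ab42c') == '42'

has_digits = make_pred(r'\d+')    # same regex, boolean result
assert has_digits('abc') is False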
3a44dbeec871aa057c4d5b42c9089a8d2b649063
|
django_agpl/urls.py
|
django_agpl/urls.py
|
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
Drop patterns import for Django 1.10 compatibility.
|
Drop patterns import for Django 1.10 compatibility.
|
Python
|
agpl-3.0
|
lamby/django-agpl,lamby/django-agpl
|
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
from . import views
- urlpatterns = patterns('django_agpl.views',
+ urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
Drop patterns import for Django 1.10 compatibility.
|
## Code Before:
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
## Instruction:
Drop patterns import for Django 1.10 compatibility.
## Code After:
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
- from django.conf.urls import patterns, url
? ----------
+ from django.conf.urls import url
from . import views
- urlpatterns = patterns('django_agpl.views',
+ urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
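For orientation, a hedged sketch of mounting this app urlconf from a project urls.py; the '^downloads/' prefix is a placeholder, and include() still lives in django.conf.urls in this Django generation.

from django.conf.urls import include, url

urlpatterns = [
    url(r'^downloads/', include('django_agpl.urls')),
]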
d31d2a73127a79566651e644d105cbe2063a6e2a
|
webapp/publish.py
|
webapp/publish.py
|
from cloudly.pubsub import Pusher
from cloudly.tweets import Tweets
from cloudly.twitterstream import Streamer
from webapp import config
class Publisher(Pusher):
def publish(self, tweets, event):
"""Keep only relevant fields from the given tweets."""
stripped = []
for tweet in tweets:
stripped.append({
'coordinates': tweet['coordinates'],
})
super(Publisher, self).publish(stripped, event)
def processor(tweet):
return True
def start():
# This trick of importing the current module is for RQ workers to
# correctly unpickle the `processor` function.
from webapp import publish
pubsub = publish.Publisher.open(config.pubsub_channel)
streamer = Streamer(publish.processor, pubsub=pubsub, is_queuing=True,
cache_length=100)
tweets = Tweets()
streamer.run(tweets.with_coordinates())
if __name__ == "__main__":
start()
|
from cloudly.pubsub import Pusher
from cloudly.tweets import Tweets, StreamManager, keep
from webapp import config
pubsub = Pusher.open(config.pubsub_channel)
def processor(tweets):
pubsub.publish(keep(['coordinates'], tweets), "tweets")
return len(tweets)
def start():
streamer = StreamManager('locate', processor, is_queuing=False)
tweets = Tweets()
streamer.run(tweets.with_coordinates())
if __name__ == "__main__":
start()
|
Fix for the new cloudly API.
|
Fix for the new cloudly API.
|
Python
|
mit
|
hdemers/webapp-template,hdemers/webapp-template,hdemers/webapp-template
|
from cloudly.pubsub import Pusher
- from cloudly.tweets import Tweets
+ from cloudly.tweets import Tweets, StreamManager, keep
- from cloudly.twitterstream import Streamer
from webapp import config
+ pubsub = Pusher.open(config.pubsub_channel)
-
- class Publisher(Pusher):
- def publish(self, tweets, event):
- """Keep only relevant fields from the given tweets."""
- stripped = []
- for tweet in tweets:
- stripped.append({
- 'coordinates': tweet['coordinates'],
- })
- super(Publisher, self).publish(stripped, event)
- def processor(tweet):
+ def processor(tweets):
- return True
+ pubsub.publish(keep(['coordinates'], tweets), "tweets")
+ return len(tweets)
def start():
+ streamer = StreamManager('locate', processor, is_queuing=False)
- # This trick of importing the current module is for RQ workers to
- # correctly unpickle the `processor` function.
- from webapp import publish
-
- pubsub = publish.Publisher.open(config.pubsub_channel)
- streamer = Streamer(publish.processor, pubsub=pubsub, is_queuing=True,
- cache_length=100)
-
tweets = Tweets()
streamer.run(tweets.with_coordinates())
if __name__ == "__main__":
start()
|
Fix for the new cloudly API.
|
## Code Before:
from cloudly.pubsub import Pusher
from cloudly.tweets import Tweets
from cloudly.twitterstream import Streamer
from webapp import config
class Publisher(Pusher):
def publish(self, tweets, event):
"""Keep only relevant fields from the given tweets."""
stripped = []
for tweet in tweets:
stripped.append({
'coordinates': tweet['coordinates'],
})
super(Publisher, self).publish(stripped, event)
def processor(tweet):
return True
def start():
# This trick of importing the current module is for RQ workers to
# correctly unpickle the `processor` function.
from webapp import publish
pubsub = publish.Publisher.open(config.pubsub_channel)
streamer = Streamer(publish.processor, pubsub=pubsub, is_queuing=True,
cache_length=100)
tweets = Tweets()
streamer.run(tweets.with_coordinates())
if __name__ == "__main__":
start()
## Instruction:
Fix for the new cloudly API.
## Code After:
from cloudly.pubsub import Pusher
from cloudly.tweets import Tweets, StreamManager, keep
from webapp import config
pubsub = Pusher.open(config.pubsub_channel)
def processor(tweets):
pubsub.publish(keep(['coordinates'], tweets), "tweets")
return len(tweets)
def start():
streamer = StreamManager('locate', processor, is_queuing=False)
tweets = Tweets()
streamer.run(tweets.with_coordinates())
if __name__ == "__main__":
start()
|
from cloudly.pubsub import Pusher
- from cloudly.tweets import Tweets
+ from cloudly.tweets import Tweets, StreamManager, keep
? +++++++++++++++++++++
- from cloudly.twitterstream import Streamer
from webapp import config
+ pubsub = Pusher.open(config.pubsub_channel)
-
- class Publisher(Pusher):
- def publish(self, tweets, event):
- """Keep only relevant fields from the given tweets."""
- stripped = []
- for tweet in tweets:
- stripped.append({
- 'coordinates': tweet['coordinates'],
- })
- super(Publisher, self).publish(stripped, event)
- def processor(tweet):
+ def processor(tweets):
? +
- return True
+ pubsub.publish(keep(['coordinates'], tweets), "tweets")
+ return len(tweets)
def start():
+ streamer = StreamManager('locate', processor, is_queuing=False)
- # This trick of importing the current module is for RQ workers to
- # correctly unpickle the `processor` function.
- from webapp import publish
-
- pubsub = publish.Publisher.open(config.pubsub_channel)
- streamer = Streamer(publish.processor, pubsub=pubsub, is_queuing=True,
- cache_length=100)
-
tweets = Tweets()
streamer.run(tweets.with_coordinates())
if __name__ == "__main__":
start()
|
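The new processor leans on keep() to project tweets down to the published fields. cloudly's implementation is not shown in this record, so this is a stand-in with the same observable behaviour as the field-stripping loop the old Publisher.publish() performed.

def keep(fields, records):
    # retain only the listed keys from each record dict
    return [{field: record.get(field) for field in fields} for record in records]

tweets = [{'coordinates': [45.5, -73.6], 'text': 'hello'}]
assert keep(['coordinates'], tweets) == [{'coordinates': [45.5, -73.6]}]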
93f2ed85d32efd88f38b3b123bc99e6afe120ae3
|
apps/payment/appconfig.py
|
apps/payment/appconfig.py
|
from django.apps import AppConfig
class PaymentConfig(AppConfig):
name = 'apps.payment'
verbose_name = 'Payment'
def ready(self):
super(PaymentConfig, self).ready()
from reversion import revisions as reversion
from apps.payment.models import PaymentPrice, PaymentTransaction
reversion.register(PaymentPrice)
reversion.register(PaymentTransaction)
|
from django.apps import AppConfig
class PaymentConfig(AppConfig):
name = 'apps.payment'
verbose_name = 'Payment'
def ready(self):
super(PaymentConfig, self).ready()
from reversion import revisions as reversion
from apps.payment.models import PaymentTransaction
reversion.register(PaymentTransaction)
|
Fix registering for already registered model
|
Fix registering for already registered model
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
from django.apps import AppConfig
class PaymentConfig(AppConfig):
name = 'apps.payment'
verbose_name = 'Payment'
def ready(self):
super(PaymentConfig, self).ready()
from reversion import revisions as reversion
- from apps.payment.models import PaymentPrice, PaymentTransaction
+ from apps.payment.models import PaymentTransaction
- reversion.register(PaymentPrice)
reversion.register(PaymentTransaction)
|
Fix registering for already registered model
|
## Code Before:
from django.apps import AppConfig
class PaymentConfig(AppConfig):
name = 'apps.payment'
verbose_name = 'Payment'
def ready(self):
super(PaymentConfig, self).ready()
from reversion import revisions as reversion
from apps.payment.models import PaymentPrice, PaymentTransaction
reversion.register(PaymentPrice)
reversion.register(PaymentTransaction)
## Instruction:
Fix registering for already registered model
## Code After:
from django.apps import AppConfig
class PaymentConfig(AppConfig):
name = 'apps.payment'
verbose_name = 'Payment'
def ready(self):
super(PaymentConfig, self).ready()
from reversion import revisions as reversion
from apps.payment.models import PaymentTransaction
reversion.register(PaymentTransaction)
|
from django.apps import AppConfig
class PaymentConfig(AppConfig):
name = 'apps.payment'
verbose_name = 'Payment'
def ready(self):
super(PaymentConfig, self).ready()
from reversion import revisions as reversion
- from apps.payment.models import PaymentPrice, PaymentTransaction
? --------------
+ from apps.payment.models import PaymentTransaction
- reversion.register(PaymentPrice)
reversion.register(PaymentTransaction)
|
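A more defensive variant of the same ready() body, sketched on the assumption that this django-reversion version exposes is_registered(); it makes the hook safe to run more than once.

from reversion import revisions as reversion
from apps.payment.models import PaymentTransaction

if not reversion.is_registered(PaymentTransaction):
    reversion.register(PaymentTransaction)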
3b83e983f1e7cd3a0866109c66dd91903db87fa1
|
goodtablesio/integrations/github/tasks/repos.py
|
goodtablesio/integrations/github/tasks/repos.py
|
import datetime
from goodtablesio.models.user import User
from goodtablesio.models.source import Source
from goodtablesio.services import database
from goodtablesio.celery_app import celery_app
from goodtablesio.integrations.github.utils.repos import iter_repos_by_token
@celery_app.task(name='goodtablesio.github.sync_user_repos')
def sync_user_repos(user_id, token):
"""Sync user repositories.
"""
user = database['session'].query(User).get(user_id)
for repo_data in iter_repos_by_token(token):
repo = database['session'].query(Source).filter(
Source.conf['github_id'].astext == repo_data['conf']['github_id']
).one_or_none()
if repo is None:
repo = Source(**repo_data)
database['session'].add(repo)
repo.active = repo_data['active']
repo.updated = datetime.datetime.utcnow(),
repo.users.append(user)
database['session'].commit()
|
import datetime
from goodtablesio.models.user import User
from goodtablesio.services import database
from goodtablesio.celery_app import celery_app
from goodtablesio.integrations.github.models.repo import GithubRepo
from goodtablesio.integrations.github.utils.repos import iter_repos_by_token
@celery_app.task(name='goodtablesio.github.sync_user_repos')
def sync_user_repos(user_id, token):
"""Sync user repositories.
"""
user = database['session'].query(User).get(user_id)
for repo_data in iter_repos_by_token(token):
repo = database['session'].query(GithubRepo).filter(
GithubRepo.conf['github_id'].astext == repo_data['conf']['github_id']
).one_or_none()
if repo is None:
repo = GithubRepo(**repo_data)
database['session'].add(repo)
repo.active = repo_data['active']
repo.updated = datetime.datetime.utcnow(),
repo.users.append(user)
database['session'].commit()
|
Use own model class on github task
|
Use own model class on github task
|
Python
|
agpl-3.0
|
frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io
|
import datetime
from goodtablesio.models.user import User
- from goodtablesio.models.source import Source
from goodtablesio.services import database
from goodtablesio.celery_app import celery_app
+ from goodtablesio.integrations.github.models.repo import GithubRepo
from goodtablesio.integrations.github.utils.repos import iter_repos_by_token
@celery_app.task(name='goodtablesio.github.sync_user_repos')
def sync_user_repos(user_id, token):
"""Sync user repositories.
"""
user = database['session'].query(User).get(user_id)
for repo_data in iter_repos_by_token(token):
- repo = database['session'].query(Source).filter(
+ repo = database['session'].query(GithubRepo).filter(
- Source.conf['github_id'].astext == repo_data['conf']['github_id']
+ GithubRepo.conf['github_id'].astext == repo_data['conf']['github_id']
).one_or_none()
if repo is None:
- repo = Source(**repo_data)
+ repo = GithubRepo(**repo_data)
database['session'].add(repo)
repo.active = repo_data['active']
repo.updated = datetime.datetime.utcnow(),
repo.users.append(user)
database['session'].commit()
|
Use own model class on github task
|
## Code Before:
import datetime
from goodtablesio.models.user import User
from goodtablesio.models.source import Source
from goodtablesio.services import database
from goodtablesio.celery_app import celery_app
from goodtablesio.integrations.github.utils.repos import iter_repos_by_token
@celery_app.task(name='goodtablesio.github.sync_user_repos')
def sync_user_repos(user_id, token):
"""Sync user repositories.
"""
user = database['session'].query(User).get(user_id)
for repo_data in iter_repos_by_token(token):
repo = database['session'].query(Source).filter(
Source.conf['github_id'].astext == repo_data['conf']['github_id']
).one_or_none()
if repo is None:
repo = Source(**repo_data)
database['session'].add(repo)
repo.active = repo_data['active']
repo.updated = datetime.datetime.utcnow(),
repo.users.append(user)
database['session'].commit()
## Instruction:
Use own model class on github task
## Code After:
import datetime
from goodtablesio.models.user import User
from goodtablesio.services import database
from goodtablesio.celery_app import celery_app
from goodtablesio.integrations.github.models.repo import GithubRepo
from goodtablesio.integrations.github.utils.repos import iter_repos_by_token
@celery_app.task(name='goodtablesio.github.sync_user_repos')
def sync_user_repos(user_id, token):
"""Sync user repositories.
"""
user = database['session'].query(User).get(user_id)
for repo_data in iter_repos_by_token(token):
repo = database['session'].query(GithubRepo).filter(
GithubRepo.conf['github_id'].astext == repo_data['conf']['github_id']
).one_or_none()
if repo is None:
repo = GithubRepo(**repo_data)
database['session'].add(repo)
repo.active = repo_data['active']
repo.updated = datetime.datetime.utcnow(),
repo.users.append(user)
database['session'].commit()
|
import datetime
from goodtablesio.models.user import User
- from goodtablesio.models.source import Source
from goodtablesio.services import database
from goodtablesio.celery_app import celery_app
+ from goodtablesio.integrations.github.models.repo import GithubRepo
from goodtablesio.integrations.github.utils.repos import iter_repos_by_token
@celery_app.task(name='goodtablesio.github.sync_user_repos')
def sync_user_repos(user_id, token):
"""Sync user repositories.
"""
user = database['session'].query(User).get(user_id)
for repo_data in iter_repos_by_token(token):
- repo = database['session'].query(Source).filter(
? ^ ----
+ repo = database['session'].query(GithubRepo).filter(
? ^^^^^^^^^
- Source.conf['github_id'].astext == repo_data['conf']['github_id']
? ^ ----
+ GithubRepo.conf['github_id'].astext == repo_data['conf']['github_id']
? ^^^^^^^^^
).one_or_none()
if repo is None:
- repo = Source(**repo_data)
? ^ ----
+ repo = GithubRepo(**repo_data)
? ^^^^^^^^^
database['session'].add(repo)
repo.active = repo_data['active']
repo.updated = datetime.datetime.utcnow(),
repo.users.append(user)
database['session'].commit()
|
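The loop above is a get-or-create keyed on conf['github_id']. A dependency-free sketch of the same upsert logic over plain dicts, with made-up repo data, for readers without the SQLAlchemy session at hand:

def sync_repos(existing, incoming, user):
    by_id = {repo['conf']['github_id']: repo for repo in existing}
    for data in incoming:
        repo = by_id.get(data['conf']['github_id'])
        if repo is None:
            repo = dict(data, users=[])  # create the missing repo record
            existing.append(repo)
            by_id[repo['conf']['github_id']] = repo
        repo['active'] = data['active']
        repo['users'].append(user)
    return existing

repos = []
sync_repos(repos, [{'conf': {'github_id': '7'}, 'active': True}], 'alice')
assert repos[0]['active'] is True and repos[0]['users'] == ['alice']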
f21ae3ffb99c5b90cb329317b2c6282e4992f6cc
|
safety/utils.py
|
safety/utils.py
|
import importlib
import re
import warnings
from django.conf import settings
from django.utils.translation import ugettext_lazy as _, ugettext
BROWSERS = (
(re.compile('Chrome'), _('Chrome')),
(re.compile('Safari'), _('Safari')),
(re.compile('Firefox'), _('Firefox')),
(re.compile('Opera'), _('Opera')),
(re.compile('IE'), _('Internet Explorer')),
)
DEVICES = (
(re.compile('Android'), _('Android')),
(re.compile('Linux'), _('Linux')),
(re.compile('iPhone'), _('iPhone')),
(re.compile('iPad'), _('iPad')),
(re.compile('(Mac OS X)'), _('OS X')),
(re.compile('NT 5.1'), _('Windows XP')),
(re.compile('NT 6.0'), _('Windows Vista')),
(re.compile('NT 6.1'), _('Windows 7')),
(re.compile('NT 6.2'), _('Windows 8')),
(re.compile('NT 6.3'), _('Windows 8.1')),
(re.compile('Windows'), _('Windows')),
)
def get_device(user_agent):
"""
Transform a User Agent into a human readable text.
"""
infos = []
for regex, name in BROWSERS:
if regex.search(user_agent):
infos.append('%s' % name)
break
for regex, name in DEVICES:
if regex.search(user_agent):
infos.append('%s' % name)
break
return ', '.join(infos)
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
engine = importlib.import_module(mod)
store = engine.SessionStore()
return store
|
try:
from django.utils.importlib import import_module
except ImportError:
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
engine = import_module(mod)
store = engine.SessionStore()
return store
def get_resolver(request, setting):
module_path = getattr(app_settings, setting)
try:
module, attribute = module_path.rsplit('.', 1)
resolver_module = import_module(module)
resolver = getattr(resolver_module, attribute)
except ImportError:
raise ImproperlyConfigured(
"Please specify a valid %s module. "
"Could not find %s " % (setting, module))
except AttributeError:
raise ImproperlyConfigured(
"Please specify a valid %s "
"function. Could not find %s function in module %s" %
(setting, attribute, module))
return resolver(request)
|
Add get_resolver() util and remove get_device() (now use ua-parser).
|
Add get_resolver() util and remove get_device() (now use ua-parser).
|
Python
|
mit
|
ulule/django-safety,ulule/django-safety
|
- import importlib
- import re
- import warnings
+ try:
+ from django.utils.importlib import import_module
+ except ImportError:
+ from importlib import import_module
from django.conf import settings
+ from django.core.exceptions import ImproperlyConfigured
- from django.utils.translation import ugettext_lazy as _, ugettext
-
-
- BROWSERS = (
- (re.compile('Chrome'), _('Chrome')),
- (re.compile('Safari'), _('Safari')),
- (re.compile('Firefox'), _('Firefox')),
- (re.compile('Opera'), _('Opera')),
- (re.compile('IE'), _('Internet Explorer')),
- )
-
- DEVICES = (
- (re.compile('Android'), _('Android')),
- (re.compile('Linux'), _('Linux')),
- (re.compile('iPhone'), _('iPhone')),
- (re.compile('iPad'), _('iPad')),
- (re.compile('(Mac OS X)'), _('OS X')),
- (re.compile('NT 5.1'), _('Windows XP')),
- (re.compile('NT 6.0'), _('Windows Vista')),
- (re.compile('NT 6.1'), _('Windows 7')),
- (re.compile('NT 6.2'), _('Windows 8')),
- (re.compile('NT 6.3'), _('Windows 8.1')),
- (re.compile('Windows'), _('Windows')),
- )
-
-
- def get_device(user_agent):
- """
- Transform a User Agent into a human readable text.
- """
- infos = []
-
- for regex, name in BROWSERS:
- if regex.search(user_agent):
- infos.append('%s' % name)
- break
-
- for regex, name in DEVICES:
- if regex.search(user_agent):
- infos.append('%s' % name)
- break
-
- return ', '.join(infos)
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
- engine = importlib.import_module(mod)
+ engine = import_module(mod)
store = engine.SessionStore()
return store
+
+ def get_resolver(request, setting):
+ module_path = getattr(app_settings, setting)
+
+ try:
+ module, attribute = module_path.rsplit('.', 1)
+ resolver_module = import_module(module)
+ resolver = getattr(resolver_module, attribute)
+
+ except ImportError:
+ raise ImproperlyConfigured(
+ "Please specify a valid %s module. "
+ "Could not find %s " % (setting, module))
+
+ except AttributeError:
+ raise ImproperlyConfigured(
+ "Please specify a valid %s "
+ "function. Could not find %s function in module %s" %
+ (setting, attribute, module))
+
+ return resolver(request)
+
|
Add get_resolver() util and remove get_device() (now use ua-parser).
|
## Code Before:
import importlib
import re
import warnings
from django.conf import settings
from django.utils.translation import ugettext_lazy as _, ugettext
BROWSERS = (
(re.compile('Chrome'), _('Chrome')),
(re.compile('Safari'), _('Safari')),
(re.compile('Firefox'), _('Firefox')),
(re.compile('Opera'), _('Opera')),
(re.compile('IE'), _('Internet Explorer')),
)
DEVICES = (
(re.compile('Android'), _('Android')),
(re.compile('Linux'), _('Linux')),
(re.compile('iPhone'), _('iPhone')),
(re.compile('iPad'), _('iPad')),
(re.compile('(Mac OS X)'), _('OS X')),
(re.compile('NT 5.1'), _('Windows XP')),
(re.compile('NT 6.0'), _('Windows Vista')),
(re.compile('NT 6.1'), _('Windows 7')),
(re.compile('NT 6.2'), _('Windows 8')),
(re.compile('NT 6.3'), _('Windows 8.1')),
(re.compile('Windows'), _('Windows')),
)
def get_device(user_agent):
"""
Transform a User Agent into a human readable text.
"""
infos = []
for regex, name in BROWSERS:
if regex.search(user_agent):
infos.append('%s' % name)
break
for regex, name in DEVICES:
if regex.search(user_agent):
infos.append('%s' % name)
break
return ', '.join(infos)
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
engine = importlib.import_module(mod)
store = engine.SessionStore()
return store
## Instruction:
Add get_resolver() util and remove get_device() (now use ua-parser).
## Code After:
try:
from django.utils.importlib import import_module
except ImportError:
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
engine = import_module(mod)
store = engine.SessionStore()
return store
def get_resolver(request, setting):
module_path = getattr(app_settings, setting)
try:
module, attribute = module_path.rsplit('.', 1)
resolver_module = import_module(module)
resolver = getattr(resolver_module, attribute)
except ImportError:
raise ImproperlyConfigured(
"Please specify a valid %s module. "
"Could not find %s " % (setting, module))
except AttributeError:
raise ImproperlyConfigured(
"Please specify a valid %s "
"function. Could not find %s function in module %s" %
(setting, attribute, module))
return resolver(request)
|
- import importlib
- import re
- import warnings
+ try:
+ from django.utils.importlib import import_module
+ except ImportError:
+ from importlib import import_module
from django.conf import settings
+ from django.core.exceptions import ImproperlyConfigured
- from django.utils.translation import ugettext_lazy as _, ugettext
-
-
- BROWSERS = (
- (re.compile('Chrome'), _('Chrome')),
- (re.compile('Safari'), _('Safari')),
- (re.compile('Firefox'), _('Firefox')),
- (re.compile('Opera'), _('Opera')),
- (re.compile('IE'), _('Internet Explorer')),
- )
-
- DEVICES = (
- (re.compile('Android'), _('Android')),
- (re.compile('Linux'), _('Linux')),
- (re.compile('iPhone'), _('iPhone')),
- (re.compile('iPad'), _('iPad')),
- (re.compile('(Mac OS X)'), _('OS X')),
- (re.compile('NT 5.1'), _('Windows XP')),
- (re.compile('NT 6.0'), _('Windows Vista')),
- (re.compile('NT 6.1'), _('Windows 7')),
- (re.compile('NT 6.2'), _('Windows 8')),
- (re.compile('NT 6.3'), _('Windows 8.1')),
- (re.compile('Windows'), _('Windows')),
- )
-
-
- def get_device(user_agent):
- """
- Transform a User Agent into a human readable text.
- """
- infos = []
-
- for regex, name in BROWSERS:
- if regex.search(user_agent):
- infos.append('%s' % name)
- break
-
- for regex, name in DEVICES:
- if regex.search(user_agent):
- infos.append('%s' % name)
- break
-
- return ', '.join(infos)
def get_session_store():
mod = getattr(settings, 'SESSION_ENGINE', 'django.contrib.sessions.backends.db')
- engine = importlib.import_module(mod)
? ----------
+ engine = import_module(mod)
store = engine.SessionStore()
return store
+
+
+ def get_resolver(request, setting):
+ module_path = getattr(app_settings, setting)
+
+ try:
+ module, attribute = module_path.rsplit('.', 1)
+ resolver_module = import_module(module)
+ resolver = getattr(resolver_module, attribute)
+
+ except ImportError:
+ raise ImproperlyConfigured(
+ "Please specify a valid %s module. "
+ "Could not find %s " % (setting, module))
+
+ except AttributeError:
+ raise ImproperlyConfigured(
+ "Please specify a valid %s "
+ "function. Could not find %s function in module %s" %
+ (setting, attribute, module))
+
+ return resolver(request)
|
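The helper boils down to importing a dotted path and calling the result. A standalone sketch of just that core, using a stdlib target so it runs anywhere; get_resolver() itself additionally reads the configured path from an app_settings object.

from importlib import import_module

def resolve_dotted(module_path, *args):
    # split 'pkg.module.attr' into the module and the attribute to call
    module, attribute = module_path.rsplit('.', 1)
    return getattr(import_module(module), attribute)(*args)

assert resolve_dotted('json.dumps', {}) == '{}'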
dd8176f26addcf36419f1723448ab1e3ae8d0e89
|
metashare/repository/search_fields.py
|
metashare/repository/search_fields.py
|
from haystack.exceptions import SearchFieldError
from haystack.indexes import SearchField, CharField, MultiValueField
class LabeledField(SearchField):
"""
A kind of mixin class for creating `SearchField`s with a label.
"""
def __init__(self, label, **kwargs):
if label is None:
raise SearchFieldError("'{0}' fields must have a label." \
.format(self.__class__.__name__))
self.label = label
super(LabeledField, self).__init__(**kwargs)
class LabeledCharField(LabeledField, CharField):
"""
A `CharField` with a label.
"""
pass
class LabeledMultiValueField(LabeledField, MultiValueField):
"""
A `MultiValueField` with a label.
"""
pass
|
from haystack.exceptions import SearchFieldError
from haystack.indexes import SearchField, CharField, MultiValueField
class LabeledField(SearchField):
"""
A kind of mixin class for creating `SearchField`s with a label.
"""
def __init__(self, label, facet_id, parent_id, **kwargs):
if label is None:
raise SearchFieldError("'{0}' fields must have a label." \
.format(self.__class__.__name__))
self.label = label
self.facet_id = facet_id
self.parent_id = parent_id
super(LabeledField, self).__init__(**kwargs)
class LabeledCharField(LabeledField, CharField):
"""
A `CharField` with a label.
"""
pass
class LabeledMultiValueField(LabeledField, MultiValueField):
"""
A `MultiValueField` with a label.
"""
pass
|
Order facets and add sub-facet feature
|
Order facets and add sub-facet feature
|
Python
|
bsd-3-clause
|
MiltosD/CEF-ELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,zeehio/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE,MiltosD/CEFELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,zeehio/META-SHARE,zeehio/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/CEF-ELRC,zeehio/META-SHARE,MiltosD/CEF-ELRC,MiltosD/CEFELRC,JuliBakagianni/CEF-ELRC,JuliBakagianni/CEF-ELRC,JuliBakagianni/META-SHARE,zeehio/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE,JuliBakagianni/META-SHARE,zeehio/META-SHARE,MiltosD/CEFELRC,JuliBakagianni/CEF-ELRC,MiltosD/CEFELRC,JuliBakagianni/META-SHARE,JuliBakagianni/META-SHARE,zeehio/META-SHARE,MiltosD/CEF-ELRC,JuliBakagianni/META-SHARE
|
from haystack.exceptions import SearchFieldError
from haystack.indexes import SearchField, CharField, MultiValueField
class LabeledField(SearchField):
"""
A kind of mixin class for creating `SearchField`s with a label.
"""
- def __init__(self, label, **kwargs):
+ def __init__(self, label, facet_id, parent_id, **kwargs):
if label is None:
raise SearchFieldError("'{0}' fields must have a label." \
.format(self.__class__.__name__))
self.label = label
+ self.facet_id = facet_id
+ self.parent_id = parent_id
super(LabeledField, self).__init__(**kwargs)
class LabeledCharField(LabeledField, CharField):
"""
A `CharField` with a label.
"""
pass
class LabeledMultiValueField(LabeledField, MultiValueField):
"""
A `MultiValueField` with a label.
"""
pass
|
Order facets and add sub-facet feature
|
## Code Before:
from haystack.exceptions import SearchFieldError
from haystack.indexes import SearchField, CharField, MultiValueField
class LabeledField(SearchField):
"""
A kind of mixin class for creating `SearchField`s with a label.
"""
def __init__(self, label, **kwargs):
if label is None:
raise SearchFieldError("'{0}' fields must have a label." \
.format(self.__class__.__name__))
self.label = label
super(LabeledField, self).__init__(**kwargs)
class LabeledCharField(LabeledField, CharField):
"""
A `CharField` with a label.
"""
pass
class LabeledMultiValueField(LabeledField, MultiValueField):
"""
A `MultiValueField` with a label.
"""
pass
## Instruction:
Order facets and add sub-facet feature
## Code After:
from haystack.exceptions import SearchFieldError
from haystack.indexes import SearchField, CharField, MultiValueField
class LabeledField(SearchField):
"""
A kind of mixin class for creating `SearchField`s with a label.
"""
def __init__(self, label, facet_id, parent_id, **kwargs):
if label is None:
raise SearchFieldError("'{0}' fields must have a label." \
.format(self.__class__.__name__))
self.label = label
self.facet_id = facet_id
self.parent_id = parent_id
super(LabeledField, self).__init__(**kwargs)
class LabeledCharField(LabeledField, CharField):
"""
A `CharField` with a label.
"""
pass
class LabeledMultiValueField(LabeledField, MultiValueField):
"""
A `MultiValueField` with a label.
"""
pass
|
from haystack.exceptions import SearchFieldError
from haystack.indexes import SearchField, CharField, MultiValueField
class LabeledField(SearchField):
"""
A kind of mixin class for creating `SearchField`s with a label.
"""
- def __init__(self, label, **kwargs):
+ def __init__(self, label, facet_id, parent_id, **kwargs):
? +++++++++++++++++++++
if label is None:
raise SearchFieldError("'{0}' fields must have a label." \
.format(self.__class__.__name__))
self.label = label
+ self.facet_id = facet_id
+ self.parent_id = parent_id
super(LabeledField, self).__init__(**kwargs)
class LabeledCharField(LabeledField, CharField):
"""
A `CharField` with a label.
"""
pass
class LabeledMultiValueField(LabeledField, MultiValueField):
"""
A `MultiValueField` with a label.
"""
pass
|
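A hedged construction example, assuming haystack is installed; the facet ids are illustrative, with parent_id=None marking a top-level facet and a non-None value pointing at the parent facet's id.

from metashare.repository.search_fields import LabeledCharField

language_facet = LabeledCharField(
    label='Language', facet_id=1, parent_id=None, model_attr='language')
assert language_facet.label == 'Language'
assert language_facet.facet_id == 1 and language_facet.parent_id is None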
718bd57ff648d431d8986a48d1c66877098c4081
|
urls.py
|
urls.py
|
from django.conf.urls import patterns, include, url
from . import methods
urlpatterns = patterns('',
url(r'^crashreport\/submit\.php$', methods.post_crashreport, name='post_crashreport'),
url(r'^issues\.xml$', methods.post_issue, name='post_issue'),
)
|
from django.conf.urls import include, url
from . import methods
urlpatterns = (
url(r'^crashreport\/submit\.php$', methods.post_crashreport, name='post_crashreport'),
url(r'^issues\.xml$', methods.post_issue, name='post_issue'),
)
|
Update to Django 1.11.19 including updates to various dependencies
|
Update to Django 1.11.19 including updates to various dependencies
|
Python
|
mit
|
mback2k/django-app-bugs
|
- from django.conf.urls import patterns, include, url
+ from django.conf.urls import include, url
from . import methods
- urlpatterns = patterns('',
+ urlpatterns = (
url(r'^crashreport\/submit\.php$', methods.post_crashreport, name='post_crashreport'),
url(r'^issues\.xml$', methods.post_issue, name='post_issue'),
)
|
Update to Django 1.11.19 including updates to various dependencies
|
## Code Before:
from django.conf.urls import patterns, include, url
from . import methods
urlpatterns = patterns('',
url(r'^crashreport\/submit\.php$', methods.post_crashreport, name='post_crashreport'),
url(r'^issues\.xml$', methods.post_issue, name='post_issue'),
)
## Instruction:
Update to Django 1.11.19 including updates to various dependencies
## Code After:
from django.conf.urls import include, url
from . import methods
urlpatterns = (
url(r'^crashreport\/submit\.php$', methods.post_crashreport, name='post_crashreport'),
url(r'^issues\.xml$', methods.post_issue, name='post_issue'),
)
|
- from django.conf.urls import patterns, include, url
? ----------
+ from django.conf.urls import include, url
from . import methods
- urlpatterns = patterns('',
+ urlpatterns = (
url(r'^crashreport\/submit\.php$', methods.post_crashreport, name='post_crashreport'),
url(r'^issues\.xml$', methods.post_issue, name='post_issue'),
)
|
eb2699b6050534045b95e5ea78cb0ea68de474ed
|
website/members/apps.py
|
website/members/apps.py
|
from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
Make members verbose name translatable
|
:speech_balloon: Make members verbose name translatable
|
Python
|
agpl-3.0
|
Dekker1/moore,UTNkar/moore,Dekker1/moore,Dekker1/moore,UTNkar/moore,UTNkar/moore,UTNkar/moore,Dekker1/moore
|
from django.apps import AppConfig
+ from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
- verbose_name = 'UTN Member Management'
+ verbose_name = _('UTN Member Management')
|
Make members verbose name translatable
|
## Code Before:
from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
## Instruction:
Make members verbose name translatable
## Code After:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
from django.apps import AppConfig
+ from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
- verbose_name = 'UTN Member Management'
+ verbose_name = _('UTN Member Management')
? ++ +
|
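The lazy variant matters because verbose_name is evaluated at import time, before any translation is active; ugettext_lazy returns a promise that is only translated when the value is rendered. A tiny sketch, assuming a Django version that still ships ugettext_lazy:

from django.utils.translation import ugettext_lazy as _

name = _('UTN Member Management')
# still a lazy proxy rather than a str; translation happens on first string use
assert not isinstance(name, str)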
4a32fe3b3735df9ec56a4ca1769268740ef5d8f4
|
tests/test_to_html.py
|
tests/test_to_html.py
|
from __future__ import unicode_literals
import unittest
class ToHTMLTest(unittest.TestCase):
SAMPLE = "Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки."
def setUp(self):
from paka.cmark import to_html
self.func = to_html
def check(self, source, expected, **kwargs):
self.assertEqual(self.func(source, **kwargs), expected)
def test_empty(self):
self.check("", "")
def test_ascii(self):
self.check("Hello, Noob!", "<p>Hello, Noob!</p>\n")
def test_non_ascii(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>. И другие штуки.</p>\n"))
def test_breaks(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"),
breaks=True)
|
from __future__ import unicode_literals
import unittest
class ToHTMLTest(unittest.TestCase):
SAMPLE = (
"Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки.\n\n"
"<p>Test of <em>HTML</em>.</p>")
def setUp(self):
from paka.cmark import to_html
self.func = to_html
def check(self, source, expected, **kwargs):
self.assertEqual(self.func(source, **kwargs), expected)
def test_empty(self):
self.check("", "")
def test_ascii(self):
self.check("Hello, Noob!", "<p>Hello, Noob!</p>\n")
def test_non_ascii(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>. И другие штуки.</p>\n"
"<p>Test of <em>HTML</em>.</p>\n"))
def test_breaks(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"
"<p>Test of <em>HTML</em>.</p>\n"),
breaks=True)
|
Add HTML fragment to test sample
|
Add HTML fragment to test sample
|
Python
|
bsd-3-clause
|
PavloKapyshin/paka.cmark,PavloKapyshin/paka.cmark,PavloKapyshin/paka.cmark
|
from __future__ import unicode_literals
import unittest
class ToHTMLTest(unittest.TestCase):
+ SAMPLE = (
- SAMPLE = "Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки."
+ "Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки.\n\n"
+ "<p>Test of <em>HTML</em>.</p>")
def setUp(self):
from paka.cmark import to_html
self.func = to_html
def check(self, source, expected, **kwargs):
self.assertEqual(self.func(source, **kwargs), expected)
def test_empty(self):
self.check("", "")
def test_ascii(self):
self.check("Hello, Noob!", "<p>Hello, Noob!</p>\n")
def test_non_ascii(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
- "<p>Вставляем <code>код</code>. И другие штуки.</p>\n"))
+ "<p>Вставляем <code>код</code>. И другие штуки.</p>\n"
+ "<p>Test of <em>HTML</em>.</p>\n"))
def test_breaks(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
- "<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"),
+ "<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"
+ "<p>Test of <em>HTML</em>.</p>\n"),
breaks=True)
|
Add HTML fragment to test sample
|
## Code Before:
from __future__ import unicode_literals
import unittest
class ToHTMLTest(unittest.TestCase):
SAMPLE = "Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки."
def setUp(self):
from paka.cmark import to_html
self.func = to_html
def check(self, source, expected, **kwargs):
self.assertEqual(self.func(source, **kwargs), expected)
def test_empty(self):
self.check("", "")
def test_ascii(self):
self.check("Hello, Noob!", "<p>Hello, Noob!</p>\n")
def test_non_ascii(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>. И другие штуки.</p>\n"))
def test_breaks(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"),
breaks=True)
## Instruction:
Add HTML fragment to test sample
## Code After:
from __future__ import unicode_literals
import unittest
class ToHTMLTest(unittest.TestCase):
SAMPLE = (
"Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки.\n\n"
"<p>Test of <em>HTML</em>.</p>")
def setUp(self):
from paka.cmark import to_html
self.func = to_html
def check(self, source, expected, **kwargs):
self.assertEqual(self.func(source, **kwargs), expected)
def test_empty(self):
self.check("", "")
def test_ascii(self):
self.check("Hello, Noob!", "<p>Hello, Noob!</p>\n")
def test_non_ascii(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>. И другие штуки.</p>\n"
"<p>Test of <em>HTML</em>.</p>\n"))
def test_breaks(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
"<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"
"<p>Test of <em>HTML</em>.</p>\n"),
breaks=True)
|
from __future__ import unicode_literals
import unittest
class ToHTMLTest(unittest.TestCase):
+ SAMPLE = (
- SAMPLE = "Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки."
? ------ ^
+ "Проверяем *CommonMark*.\n\nВставляем `код`.\nИ другие штуки.\n\n"
? ^^ ++++
+ "<p>Test of <em>HTML</em>.</p>")
def setUp(self):
from paka.cmark import to_html
self.func = to_html
def check(self, source, expected, **kwargs):
self.assertEqual(self.func(source, **kwargs), expected)
def test_empty(self):
self.check("", "")
def test_ascii(self):
self.check("Hello, Noob!", "<p>Hello, Noob!</p>\n")
def test_non_ascii(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
- "<p>Вставляем <code>код</code>. И другие штуки.</p>\n"))
? --
+ "<p>Вставляем <code>код</code>. И другие штуки.</p>\n"
+ "<p>Test of <em>HTML</em>.</p>\n"))
def test_breaks(self):
self.check(
self.SAMPLE,
(
"<p>Проверяем <em>CommonMark</em>.</p>\n"
- "<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"),
? --
+ "<p>Вставляем <code>код</code>.\nИ другие штуки.</p>\n"
+ "<p>Test of <em>HTML</em>.</p>\n"),
breaks=True)
|
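A hedged usage sketch mirroring test_ascii, assuming paka.cmark is installed; per the assertions above, to_html returns the rendered fragment with a trailing newline, and breaks=True preserves single newlines inside a paragraph instead of collapsing them to spaces.

from paka.cmark import to_html

assert to_html('Hello, Noob!') == '<p>Hello, Noob!</p>\n'
assert to_html('one\ntwo', breaks=True) == '<p>one\ntwo</p>\n'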
7a7de7b7a44180f4ea3b6d5b3334ce406eb72b38
|
discussion/migrations/0002_discussionthread_updated.py
|
discussion/migrations/0002_discussionthread_updated.py
|
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
|
Fix because we're not timezone aware.
|
Fix because we're not timezone aware.
|
Python
|
mit
|
btomaszewski/webdoctor-server
|
from __future__ import unicode_literals
from django.db import models, migrations
+ from django.utils import timezone
- import datetime
- from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
- field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
+ field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
|
Fix because we're not timezone aware.
|
## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
preserve_default=False,
),
]
## Instruction:
Fix because we're not timezone aware.
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
|
from __future__ import unicode_literals
from django.db import models, migrations
+ from django.utils import timezone
- import datetime
- from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('discussion', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='discussionthread',
name='updated',
- field=models.DateTimeField(default=datetime.datetime(2015, 7, 31, 15, 51, 36, 361733, tzinfo=utc), auto_now=True),
+ field=models.DateTimeField(default=timezone.now(), auto_now=True),
preserve_default=False,
),
]
|
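The practical difference from the datetime literal it replaces: the literal was frozen at makemigrations time, whereas timezone.now() is evaluated when the migration module is imported and always yields an aware datetime. A minimal check, assuming standalone settings:

from django.conf import settings

settings.configure(USE_TZ=True)  # configure before calling timezone.now()

from django.utils import timezone

stamp = timezone.now()
assert stamp.tzinfo is not None  # aware, unlike a naive datetime.datetime.now()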
f55af10f1767d39fdba65fb4c17beee526f96748
|
lib/__init__.py
|
lib/__init__.py
|
"""retriever.lib contains the core EcoData Retriever modules."""
|
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
|
Check for and use system proxies for downloading files
|
Check for and use system proxies for downloading files
In some cases, when the user is using a proxy, urllib.urlopen() will fail to successfully open https files. This prevents the retriever from accessing the scripts stored on GitHub and causes the installation to fail (see #268). This change checks for the existence of proxies and makes them available in a way that urllib.urlopen() can find them.
|
Python
|
mit
|
embaldridge/retriever,davharris/retriever,davharris/retriever,davharris/retriever,embaldridge/retriever,goelakash/retriever,henrykironde/deletedret,goelakash/retriever,henrykironde/deletedret,embaldridge/retriever
|
+
"""retriever.lib contains the core EcoData Retriever modules."""
+ import os
+ def set_proxy():
+ proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
+ for proxy in proxies:
+ if os.getenv(proxy):
+ if len(os.environ[proxy]) != 0:
+ for i in proxies:
+ os.environ[i] = os.environ[proxy]
+ break
+
+ set_proxy()
+
|
Check for and use system proxies for downloading files
|
## Code Before:
"""retriever.lib contains the core EcoData Retriever modules."""
## Instruction:
Check for and use system proxies for downloading files
## Code After:
"""retriever.lib contains the core EcoData Retriever modules."""
import os
def set_proxy():
proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
for proxy in proxies:
if os.getenv(proxy):
if len(os.environ[proxy]) != 0:
for i in proxies:
os.environ[i] = os.environ[proxy]
break
set_proxy()
|
+
"""retriever.lib contains the core EcoData Retriever modules."""
+ import os
+
+ def set_proxy():
+ proxies = ["https_proxy", "http_proxy", "ftp_proxy", "HTTP_PROXY", "HTTPS_PROXY", "FTP_PROXY"]
+ for proxy in proxies:
+ if os.getenv(proxy):
+ if len(os.environ[proxy]) != 0:
+ for i in proxies:
+ os.environ[i] = os.environ[proxy]
+ break
+
+ set_proxy()
|
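A usage sketch with a placeholder proxy URL, assuming the package imports as retriever.lib; whichever of the six variables is set first wins and is mirrored into all of them, which lets urllib's proxy detection see a consistent environment.

import os

os.environ['HTTP_PROXY'] = 'http://proxy.example:3128'  # placeholder value

from retriever.lib import set_proxy  # importing the package also runs set_proxy() once

set_proxy()
assert os.environ['https_proxy'] == 'http://proxy.example:3128'
assert os.environ['ftp_proxy'] == 'http://proxy.example:3128'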
3e913e4267fd7750516edcbed1aa687e0cbd17fe
|
edx_repo_tools/oep2/__init__.py
|
edx_repo_tools/oep2/__init__.py
|
import click
from . import explode_repos_yaml
from .report import cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
cli.add_command(cli.cli, 'report')
|
import click
from . import explode_repos_yaml
from .report.cli import cli as report_cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
cli.add_command(report_cli, 'report')
|
Make oep-2 checker run again
|
Make oep-2 checker run again
|
Python
|
apache-2.0
|
edx/repo-tools,edx/repo-tools
|
import click
from . import explode_repos_yaml
- from .report import cli
+ from .report.cli import cli as report_cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
- cli.add_command(cli.cli, 'report')
+ cli.add_command(report_cli, 'report')
|
Make oep-2 checker run again
|
## Code Before:
import click
from . import explode_repos_yaml
from .report import cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
cli.add_command(cli.cli, 'report')
## Instruction:
Make oep-2 checker run again
## Code After:
import click
from . import explode_repos_yaml
from .report.cli import cli as report_cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
cli.add_command(report_cli, 'report')
|
import click
from . import explode_repos_yaml
- from .report import cli
+ from .report.cli import cli as report_cli
def _cli():
cli(auto_envvar_prefix="OEP2")
@click.group()
def cli():
"""
Tools for implementing and enforcing OEP-2.
"""
pass
cli.add_command(explode_repos_yaml.explode)
cli.add_command(explode_repos_yaml.implode)
- cli.add_command(cli.cli, 'report')
? ^^^^
+ cli.add_command(report_cli, 'report')
? ^^^^^^^
|
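A self-contained sketch of the underlying click pattern: registering one group on another under an explicit name, so the import alias above cannot shadow the local cli symbol. The subcommand names here are invented.

import click

@click.group()
def cli():
    """Top-level command group."""

@click.group()
def report_cli():
    """Report-related subcommands."""

@report_cli.command()
def summary():
    click.echo('report summary')

cli.add_command(report_cli, 'report')  # invoked as: cli report summary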
5b43264321e4649312050264524a6df7682a6641
|
mfr/ext/md/tests/test_md.py
|
mfr/ext/md/tests/test_md.py
|
from mfr.ext.md import Handler, render
from mock import MagicMock
def test_render_html():
fakefile = MagicMock(spec=file)
fakefile.read.return_value = '# foo'
assert render.render_html(fakefile) == '<h1>foo</h1>'
fakefile.read.return_value = '_italic_'
assert render.render_html(fakefile) == '<p><em>italic</em></p>'
fakefile.read.return_value = '*italic*'
assert render.render_html(fakefile) == '<p><em>italic</em></p>'
fakefile.read.return_value = '''
* one
* two'''
assert render.render_html(fakefile) == '''<ul>
<li>one</li>
<li>two</li>
</ul>'''
def test_detect(fakefile):
test_handler=Handler()
fakefile.name='file.notmd'
assert test_handler.detect(fakefile) is False
fakefile.name='file.md'
assert test_handler.detect(fakefile) is True
fakefile.name='file.markdown'
assert test_handler.detect(fakefile) is True
|
from mfr.ext.md import Handler, render
from mock import MagicMock
def test_render_html():
fakefile = MagicMock(spec=file)
fakefile.read.return_value = '# foo'
assert render.render_html(fakefile).content == '<h1>foo</h1>'
fakefile.read.return_value = '_italic_'
assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
fakefile.read.return_value = '*italic*'
assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
fakefile.read.return_value = '''
* one
* two'''
assert render.render_html(fakefile).content == '''<ul>
<li>one</li>
<li>two</li>
</ul>'''
def test_detect(fakefile):
test_handler=Handler()
fakefile.name='file.notmd'
assert test_handler.detect(fakefile) is False
fakefile.name='file.md'
assert test_handler.detect(fakefile) is True
fakefile.name='file.markdown'
assert test_handler.detect(fakefile) is True
|
Update md test for render fix
|
Update md test for render fix
|
Python
|
apache-2.0
|
CenterForOpenScience/modular-file-renderer,mfraezz/modular-file-renderer,Johnetordoff/modular-file-renderer,AddisonSchiller/modular-file-renderer,mfraezz/modular-file-renderer,rdhyee/modular-file-renderer,icereval/modular-file-renderer,TomBaxter/modular-file-renderer,Johnetordoff/modular-file-renderer,rdhyee/modular-file-renderer,rdhyee/modular-file-renderer,Johnetordoff/modular-file-renderer,rdhyee/modular-file-renderer,AddisonSchiller/modular-file-renderer,icereval/modular-file-renderer,haoyuchen1992/modular-file-renderer,AddisonSchiller/modular-file-renderer,felliott/modular-file-renderer,Johnetordoff/modular-file-renderer,TomBaxter/modular-file-renderer,haoyuchen1992/modular-file-renderer,haoyuchen1992/modular-file-renderer,mfraezz/modular-file-renderer,TomBaxter/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,icereval/modular-file-renderer,haoyuchen1992/modular-file-renderer,felliott/modular-file-renderer,mfraezz/modular-file-renderer,TomBaxter/modular-file-renderer,CenterForOpenScience/modular-file-renderer,CenterForOpenScience/modular-file-renderer,AddisonSchiller/modular-file-renderer,felliott/modular-file-renderer
|
from mfr.ext.md import Handler, render
from mock import MagicMock
def test_render_html():
fakefile = MagicMock(spec=file)
fakefile.read.return_value = '# foo'
- assert render.render_html(fakefile) == '<h1>foo</h1>'
+ assert render.render_html(fakefile).content == '<h1>foo</h1>'
fakefile.read.return_value = '_italic_'
- assert render.render_html(fakefile) == '<p><em>italic</em></p>'
+ assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
fakefile.read.return_value = '*italic*'
- assert render.render_html(fakefile) == '<p><em>italic</em></p>'
+ assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
fakefile.read.return_value = '''
* one
* two'''
- assert render.render_html(fakefile) == '''<ul>
+ assert render.render_html(fakefile).content == '''<ul>
<li>one</li>
<li>two</li>
</ul>'''
def test_detect(fakefile):
test_handler=Handler()
fakefile.name='file.notmd'
assert test_handler.detect(fakefile) is False
fakefile.name='file.md'
assert test_handler.detect(fakefile) is True
fakefile.name='file.markdown'
assert test_handler.detect(fakefile) is True
|
Update md test for render fix
|
## Code Before:
from mfr.ext.md import Handler, render
from mock import MagicMock
def test_render_html():
fakefile = MagicMock(spec=file)
fakefile.read.return_value = '# foo'
assert render.render_html(fakefile) == '<h1>foo</h1>'
fakefile.read.return_value = '_italic_'
assert render.render_html(fakefile) == '<p><em>italic</em></p>'
fakefile.read.return_value = '*italic*'
assert render.render_html(fakefile) == '<p><em>italic</em></p>'
fakefile.read.return_value = '''
* one
* two'''
assert render.render_html(fakefile) == '''<ul>
<li>one</li>
<li>two</li>
</ul>'''
def test_detect(fakefile):
test_handler=Handler()
fakefile.name='file.notmd'
assert test_handler.detect(fakefile) is False
fakefile.name='file.md'
assert test_handler.detect(fakefile) is True
fakefile.name='file.markdown'
assert test_handler.detect(fakefile) is True
## Instruction:
Update md test for render fix
## Code After:
from mfr.ext.md import Handler, render
from mock import MagicMock
def test_render_html():
fakefile = MagicMock(spec=file)
fakefile.read.return_value = '# foo'
assert render.render_html(fakefile).content == '<h1>foo</h1>'
fakefile.read.return_value = '_italic_'
assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
fakefile.read.return_value = '*italic*'
assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
fakefile.read.return_value = '''
* one
* two'''
assert render.render_html(fakefile).content == '''<ul>
<li>one</li>
<li>two</li>
</ul>'''
def test_detect(fakefile):
test_handler=Handler()
fakefile.name='file.notmd'
assert test_handler.detect(fakefile) is False
fakefile.name='file.md'
assert test_handler.detect(fakefile) is True
fakefile.name='file.markdown'
assert test_handler.detect(fakefile) is True
|
from mfr.ext.md import Handler, render
from mock import MagicMock
def test_render_html():
fakefile = MagicMock(spec=file)
fakefile.read.return_value = '# foo'
- assert render.render_html(fakefile) == '<h1>foo</h1>'
+ assert render.render_html(fakefile).content == '<h1>foo</h1>'
? ++++++++
fakefile.read.return_value = '_italic_'
- assert render.render_html(fakefile) == '<p><em>italic</em></p>'
+ assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
? ++++++++
fakefile.read.return_value = '*italic*'
- assert render.render_html(fakefile) == '<p><em>italic</em></p>'
+ assert render.render_html(fakefile).content == '<p><em>italic</em></p>'
? ++++++++
fakefile.read.return_value = '''
* one
* two'''
- assert render.render_html(fakefile) == '''<ul>
+ assert render.render_html(fakefile).content == '''<ul>
? ++++++++
<li>one</li>
<li>two</li>
</ul>'''
def test_detect(fakefile):
test_handler=Handler()
fakefile.name='file.notmd'
assert test_handler.detect(fakefile) is False
fakefile.name='file.md'
assert test_handler.detect(fakefile) is True
fakefile.name='file.markdown'
assert test_handler.detect(fakefile) is True
|
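The updated assertions imply render_html now returns a wrapper object rather than a bare string. mfr's actual result class is not shown in this record, so this stand-in models only the content attribute the tests rely on.

class RenderResult(object):
    """Minimal stand-in: the tests above only touch .content."""
    def __init__(self, content):
        self.content = content

assert RenderResult('<h1>foo</h1>').content == '<h1>foo</h1>'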
eabc792a4ed87900ae1cb6a9404c3f85874cd053
|
avwx_api/views.py
|
avwx_api/views.py
|
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
|
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
Return 400 status for incomplete API queries
|
Return 400 status for incomplete API queries
|
Python
|
mit
|
flyinactor91/AVWX-API,flyinactor91/AVWX-API,flyinactor91/AVWX-API
|
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
- return jsonify({'Error': 'No report type given'})
+ return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
- return jsonify({'Error': 'No station given'})
+ return jsonify({'Error': 'No station given'}), 400
|
Return 400 status for incomplete API queries
|
## Code Before:
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
## Instruction:
Return 400 status for incomplete API queries
## Code After:
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
- return jsonify({'Error': 'No report type given'})
+ return jsonify({'Error': 'No report type given'}), 400
? +++++
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
- return jsonify({'Error': 'No station given'})
+ return jsonify({'Error': 'No station given'}), 400
? +++++
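For context, Flask unpacks a (body, status) tuple returned from a view into the response and its HTTP status code, which is all this change relies on. A self-contained sketch, exercised with the test client so nothing here depends on the avwx_api app:
from flask import Flask, jsonify

app = Flask(__name__)


@app.route('/api')
def no_report():
    # The second tuple element becomes the status code of the response.
    return jsonify({'Error': 'No report type given'}), 400


with app.test_client() as client:
    resp = client.get('/api')
    print(resp.status_code)   # 400
    print(resp.get_json())    # {'Error': 'No report type given'}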
|
df216bdc25ef29da821f577a517ccdca61448cf4
|
django_lightweight_queue/middleware/logging.py
|
django_lightweight_queue/middleware/logging.py
|
from __future__ import absolute_import
import logging
import traceback
log = logging.getLogger(__name__)
class LoggingMiddleware(object):
def process_job(self, job):
log.info("Running job %s", job)
def process_result(self, job, result, duration):
log.info("Finished job %s => %r (Time taken: %.2fs)",
job,
result,
duration,
)
def process_exception(self, job, duration, *exc_info):
log.error("Exception when processing %r (duration: %.2fs): %s",
job,
duration,
''.join(traceback.format_exception(*exc_info)),
)
|
from __future__ import absolute_import
import logging
import traceback
log = logging.getLogger(__name__)
class LoggingMiddleware(object):
def process_job(self, job):
log.info("Running job %s", job)
def process_result(self, job, result, duration):
log.info("Finished job => %r (Time taken: %.2fs)",
result,
duration,
)
def process_exception(self, job, duration, *exc_info):
log.error("Exception when processing job (duration: %.2fs): %s",
duration,
''.join(traceback.format_exception(*exc_info)),
)
|
Save over 50% of logfile 'bloat' by not repeating all args on success/failure
|
Save over 50% of logfile 'bloat' by not repeating all args on success/failure
The data will be right above it just before we run the job.
|
Python
|
bsd-3-clause
|
prophile/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue,thread/django-lightweight-queue
|
from __future__ import absolute_import
import logging
import traceback
log = logging.getLogger(__name__)
class LoggingMiddleware(object):
def process_job(self, job):
log.info("Running job %s", job)
def process_result(self, job, result, duration):
- log.info("Finished job %s => %r (Time taken: %.2fs)",
+ log.info("Finished job => %r (Time taken: %.2fs)",
- job,
result,
duration,
)
def process_exception(self, job, duration, *exc_info):
- log.error("Exception when processing %r (duration: %.2fs): %s",
+ log.error("Exception when processing job (duration: %.2fs): %s",
- job,
duration,
''.join(traceback.format_exception(*exc_info)),
)
|
Save over 50% of logfile 'bloat' by not repeating all args on success/failure
|
## Code Before:
from __future__ import absolute_import
import logging
import traceback
log = logging.getLogger(__name__)
class LoggingMiddleware(object):
def process_job(self, job):
log.info("Running job %s", job)
def process_result(self, job, result, duration):
log.info("Finished job %s => %r (Time taken: %.2fs)",
job,
result,
duration,
)
def process_exception(self, job, duration, *exc_info):
log.error("Exception when processing %r (duration: %.2fs): %s",
job,
duration,
''.join(traceback.format_exception(*exc_info)),
)
## Instruction:
Save over 50% of logfile 'bloat' by not repeating all args on success/failure
## Code After:
from __future__ import absolute_import
import logging
import traceback
log = logging.getLogger(__name__)
class LoggingMiddleware(object):
def process_job(self, job):
log.info("Running job %s", job)
def process_result(self, job, result, duration):
log.info("Finished job => %r (Time taken: %.2fs)",
result,
duration,
)
def process_exception(self, job, duration, *exc_info):
log.error("Exception when processing job (duration: %.2fs): %s",
duration,
''.join(traceback.format_exception(*exc_info)),
)
|
from __future__ import absolute_import
import logging
import traceback
log = logging.getLogger(__name__)
class LoggingMiddleware(object):
def process_job(self, job):
log.info("Running job %s", job)
def process_result(self, job, result, duration):
- log.info("Finished job %s => %r (Time taken: %.2fs)",
? ---
+ log.info("Finished job => %r (Time taken: %.2fs)",
- job,
result,
duration,
)
def process_exception(self, job, duration, *exc_info):
- log.error("Exception when processing %r (duration: %.2fs): %s",
? ^^
+ log.error("Exception when processing job (duration: %.2fs): %s",
? ^^^
- job,
duration,
''.join(traceback.format_exception(*exc_info)),
)
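Why the trimmed lines stay readable: the same worker logs the full job via process_job immediately before the result or exception line, so the two lines pair up in the stream. A small, self-contained driver (the middleware body is copied from the commit above; FakeJob is a made-up stand-in):
import logging
import sys
import time

logging.basicConfig(stream=sys.stdout, level=logging.INFO,
                    format='%(levelname)s %(message)s')
log = logging.getLogger(__name__)


class LoggingMiddleware(object):
    def process_job(self, job):
        log.info("Running job %s", job)

    def process_result(self, job, result, duration):
        log.info("Finished job => %r (Time taken: %.2fs)", result, duration)


class FakeJob(object):
    def __repr__(self):
        return 'Job(path=app.tasks.send_email, args=(42,))'


mw, job, start = LoggingMiddleware(), FakeJob(), time.time()
mw.process_job(job)                                  # full job details logged here
mw.process_result(job, 'sent', time.time() - start)  # job omitted; the line above has it
# INFO Running job Job(path=app.tasks.send_email, args=(42,))
# INFO Finished job => 'sent' (Time taken: 0.00s)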
|
77579ff7d7a63539d350c40d49eedeb21e61bd61
|
acute/schema.py
|
acute/schema.py
|
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
Make Diagnosis come before PMH
|
Make Diagnosis come before PMH
closes #5
|
Python
|
agpl-3.0
|
openhealthcare/acute,openhealthcare/acute,openhealthcare/acute
|
from acute import models
list_columns = [
models.Demographics,
models.Location,
+ models.Diagnosis,
models.PastMedicalHistory,
- models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
+ models.Diagnosis,
models.PastMedicalHistory,
- models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
+ models.Diagnosis,
models.PastMedicalHistory,
- models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
Make Diagnosis come before PMH
|
## Code Before:
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.PastMedicalHistory,
models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
## Instruction:
Make Diagnosis come before PMH
## Code After:
from acute import models
list_columns = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
models.Diagnosis,
models.PastMedicalHistory,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
from acute import models
list_columns = [
models.Demographics,
models.Location,
+ models.Diagnosis,
models.PastMedicalHistory,
- models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_take = [
models.Demographics,
models.Location,
models.Clerking,
+ models.Diagnosis,
models.PastMedicalHistory,
- models.Diagnosis,
models.Plan,
models.Rescuscitation
]
list_columns_specialist_teams = [
models.Demographics,
models.Location,
+ models.Diagnosis,
models.PastMedicalHistory,
- models.Diagnosis,
models.Plan,
models.Treatment,
models.Investigation,
models.DischargeDue,
]
list_schemas = {
'default': list_columns,
'take': {
'default': list_columns_take
},
'cardiology': {
'default': list_columns_specialist_teams
},
'respiratory': list_columns_specialist_teams
}
detail_columns = list_columns
|
52430087413e24c94a532e67a2c77248ecc0598c
|
saleor/core/extensions/checks.py
|
saleor/core/extensions/checks.py
|
import importlib
from typing import List
from django.conf import settings
from django.core.checks import Error, register
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
manager_path, _, manager_name = settings.EXTENSIONS_MANAGER.rpartition(".")
try:
manager_module = importlib.import_module(manager_path)
except ModuleNotFoundError:
errors.append(Error("Extension Manager path: %s doesn't exist" % manager_path))
else:
manager_class = getattr(manager_module, manager_name, None)
if not manager_class:
errors.append(
Error(
"Extension Manager %s doesn't exists in specific path %s"
% (manager_name, str(manager_module))
)
)
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
plugin_path, _, plugin_name = plugin_path.rpartition(".")
try:
plugin_module = importlib.import_module(plugin_path)
except ModuleNotFoundError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
else:
plugin_class = getattr(plugin_module, plugin_name, None)
if not plugin_class:
errors.append(
Error(
"Plugin %s doesn't exists in specific path %s"
% (plugin_name, str(plugin_module))
)
)
|
from typing import List
from django.conf import settings
from django.core.checks import Error, register
from django.utils.module_loading import import_string
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
try:
import_string(settings.EXTENSIONS_MANAGER)
except ImportError:
errors.append(
Error(
"Extension Manager path: %s doesn't exist" % settings.EXTENSIONS_MANAGER
)
)
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
try:
import_string(plugin_path)
except ImportError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
|
Use django helper to validate manager and plugins paths
|
Use django helper to validate manager and plugins paths
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor,maferelo/saleor,maferelo/saleor,maferelo/saleor
|
- import importlib
from typing import List
from django.conf import settings
from django.core.checks import Error, register
+ from django.utils.module_loading import import_string
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
- manager_path, _, manager_name = settings.EXTENSIONS_MANAGER.rpartition(".")
+
try:
+ import_string(settings.EXTENSIONS_MANAGER)
+ except ImportError:
- manager_module = importlib.import_module(manager_path)
- except ModuleNotFoundError:
- errors.append(Error("Extension Manager path: %s doesn't exist" % manager_path))
- else:
- manager_class = getattr(manager_module, manager_name, None)
- if not manager_class:
- errors.append(
+ errors.append(
- Error(
+ Error(
+ "Extension Manager path: %s doesn't exist" % settings.EXTENSIONS_MANAGER
- "Extension Manager %s doesn't exists in specific path %s"
- % (manager_name, str(manager_module))
- )
)
+ )
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
- plugin_path, _, plugin_name = plugin_path.rpartition(".")
try:
- plugin_module = importlib.import_module(plugin_path)
- except ModuleNotFoundError:
+ import_string(plugin_path)
+ except ImportError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
- else:
- plugin_class = getattr(plugin_module, plugin_name, None)
- if not plugin_class:
- errors.append(
- Error(
- "Plugin %s doesn't exists in specific path %s"
- % (plugin_name, str(plugin_module))
- )
- )
|
Use django helper to validate manager and plugins paths
|
## Code Before:
import importlib
from typing import List
from django.conf import settings
from django.core.checks import Error, register
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
manager_path, _, manager_name = settings.EXTENSIONS_MANAGER.rpartition(".")
try:
manager_module = importlib.import_module(manager_path)
except ModuleNotFoundError:
errors.append(Error("Extension Manager path: %s doesn't exist" % manager_path))
else:
manager_class = getattr(manager_module, manager_name, None)
if not manager_class:
errors.append(
Error(
"Extension Manager %s doesn't exists in specific path %s"
% (manager_name, str(manager_module))
)
)
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
plugin_path, _, plugin_name = plugin_path.rpartition(".")
try:
plugin_module = importlib.import_module(plugin_path)
except ModuleNotFoundError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
else:
plugin_class = getattr(plugin_module, plugin_name, None)
if not plugin_class:
errors.append(
Error(
"Plugin %s doesn't exists in specific path %s"
% (plugin_name, str(plugin_module))
)
)
## Instruction:
Use django helper to validate manager and plugins paths
## Code After:
from typing import List
from django.conf import settings
from django.core.checks import Error, register
from django.utils.module_loading import import_string
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
try:
import_string(settings.EXTENSIONS_MANAGER)
except ImportError:
errors.append(
Error(
"Extension Manager path: %s doesn't exist" % settings.EXTENSIONS_MANAGER
)
)
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
try:
import_string(plugin_path)
except ImportError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
|
- import importlib
from typing import List
from django.conf import settings
from django.core.checks import Error, register
+ from django.utils.module_loading import import_string
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
- manager_path, _, manager_name = settings.EXTENSIONS_MANAGER.rpartition(".")
+
try:
+ import_string(settings.EXTENSIONS_MANAGER)
+ except ImportError:
- manager_module = importlib.import_module(manager_path)
- except ModuleNotFoundError:
- errors.append(Error("Extension Manager path: %s doesn't exist" % manager_path))
- else:
- manager_class = getattr(manager_module, manager_name, None)
- if not manager_class:
- errors.append(
? ----
+ errors.append(
- Error(
? ----
+ Error(
+ "Extension Manager path: %s doesn't exist" % settings.EXTENSIONS_MANAGER
- "Extension Manager %s doesn't exists in specific path %s"
- % (manager_name, str(manager_module))
- )
)
+ )
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
- plugin_path, _, plugin_name = plugin_path.rpartition(".")
try:
- plugin_module = importlib.import_module(plugin_path)
- except ModuleNotFoundError:
+ import_string(plugin_path)
+ except ImportError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
- else:
- plugin_class = getattr(plugin_module, plugin_name, None)
- if not plugin_class:
- errors.append(
- Error(
- "Plugin %s doesn't exists in specific path %s"
- % (plugin_name, str(plugin_module))
- )
- )
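The helper being adopted resolves a full dotted path to an attribute in one call and raises ImportError for both a missing module and a missing attribute, which is why a single except ImportError clause now covers what previously took ModuleNotFoundError plus a getattr check. A quick illustration; the failing path below is deliberately bogus:
from django.utils.module_loading import import_string

dumps = import_string('json.dumps')      # equivalent to: from json import dumps
print(dumps({'ok': True}))               # {"ok": true}

try:
    import_string('json.no_such_name')   # missing attribute also raises ImportError
except ImportError as exc:
    print('check would append Error(...): %s' % exc)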
|
e01e5cca84c3eeb04d20b3a91bbb44d688418bf3
|
pypaas/sshkey.py
|
pypaas/sshkey.py
|
import os.path
import sys
from . import util
class SSHKey(object):
@staticmethod
def rebuild_authorized_keys():
lines = []
ssh_dir = os.path.expanduser('~/.ssh')
util.mkdir_p(os.path.join(ssh_dir, 'authorized_keys.d'))
for name in os.listdir(os.path.join(ssh_dir, 'authorized_keys.d')):
key = open(os.path.join(ssh_dir, 'authorized_keys.d', name)).read()
keyparts = key.split()
assert keyparts[0].startswith('ssh-')
key = ' '.join(keyparts[:2])
name = name.replace('.pub', '')
lines.append(
('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
'no-port-forwarding {key} {name}').format(
pypaas_cmd=os.path.join(
os.path.dirname(sys.executable), 'pypaas'
),
key=key,
name=name
)
)
util.replace_file(os.path.join(ssh_dir, 'authorized_keys'),
'\n'.join(lines)+'\n')
|
import os.path
import sys
from . import util
class SSHKey(object):
@staticmethod
def rebuild_authorized_keys():
lines = []
ssh_dir = os.path.expanduser('~/.ssh')
util.mkdir_p(os.path.join(ssh_dir, 'authorized_keys.d'))
for name in os.listdir(os.path.join(ssh_dir, 'authorized_keys.d')):
keyfilename = os.path.join(ssh_dir, 'authorized_keys.d', name)
with open(keyfilename) as keyfile:
for key in keyfile:
keyparts = key.split()
assert keyparts[0].startswith('ssh-')
key = ' '.join(keyparts[:2])
name = name.replace('.pub', '')
lines.append(
('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
'no-port-forwarding {key} {name}').format(
pypaas_cmd=os.path.join(
os.path.dirname(sys.executable), 'pypaas'
),
key=key,
name=name
)
)
util.replace_file(os.path.join(ssh_dir, 'authorized_keys'),
'\n'.join(lines)+'\n')
|
Allow multiple SSH keys per file
|
Allow multiple SSH keys per file
|
Python
|
mit
|
fintura/pyPaaS,fintura/pyPaaS
|
import os.path
import sys
from . import util
class SSHKey(object):
@staticmethod
def rebuild_authorized_keys():
lines = []
ssh_dir = os.path.expanduser('~/.ssh')
util.mkdir_p(os.path.join(ssh_dir, 'authorized_keys.d'))
for name in os.listdir(os.path.join(ssh_dir, 'authorized_keys.d')):
- key = open(os.path.join(ssh_dir, 'authorized_keys.d', name)).read()
+ keyfilename = os.path.join(ssh_dir, 'authorized_keys.d', name)
+ with open(keyfilename) as keyfile:
+ for key in keyfile:
- keyparts = key.split()
+ keyparts = key.split()
- assert keyparts[0].startswith('ssh-')
+ assert keyparts[0].startswith('ssh-')
- key = ' '.join(keyparts[:2])
+ key = ' '.join(keyparts[:2])
- name = name.replace('.pub', '')
+ name = name.replace('.pub', '')
- lines.append(
+ lines.append(
- ('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
+ ('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
- 'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
+ 'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
- 'no-port-forwarding {key} {name}').format(
+ 'no-port-forwarding {key} {name}').format(
- pypaas_cmd=os.path.join(
+ pypaas_cmd=os.path.join(
- os.path.dirname(sys.executable), 'pypaas'
+ os.path.dirname(sys.executable), 'pypaas'
+ ),
+ key=key,
+ name=name
+ )
- ),
+ )
- key=key,
- name=name
- )
- )
util.replace_file(os.path.join(ssh_dir, 'authorized_keys'),
'\n'.join(lines)+'\n')
|
Allow multiple SSH keys per file
|
## Code Before:
import os.path
import sys
from . import util
class SSHKey(object):
@staticmethod
def rebuild_authorized_keys():
lines = []
ssh_dir = os.path.expanduser('~/.ssh')
util.mkdir_p(os.path.join(ssh_dir, 'authorized_keys.d'))
for name in os.listdir(os.path.join(ssh_dir, 'authorized_keys.d')):
key = open(os.path.join(ssh_dir, 'authorized_keys.d', name)).read()
keyparts = key.split()
assert keyparts[0].startswith('ssh-')
key = ' '.join(keyparts[:2])
name = name.replace('.pub', '')
lines.append(
('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
'no-port-forwarding {key} {name}').format(
pypaas_cmd=os.path.join(
os.path.dirname(sys.executable), 'pypaas'
),
key=key,
name=name
)
)
util.replace_file(os.path.join(ssh_dir, 'authorized_keys'),
'\n'.join(lines)+'\n')
## Instruction:
Allow multiple SSH keys per file
## Code After:
import os.path
import sys
from . import util
class SSHKey(object):
@staticmethod
def rebuild_authorized_keys():
lines = []
ssh_dir = os.path.expanduser('~/.ssh')
util.mkdir_p(os.path.join(ssh_dir, 'authorized_keys.d'))
for name in os.listdir(os.path.join(ssh_dir, 'authorized_keys.d')):
keyfilename = os.path.join(ssh_dir, 'authorized_keys.d', name)
with open(keyfilename) as keyfile:
for key in keyfile:
keyparts = key.split()
assert keyparts[0].startswith('ssh-')
key = ' '.join(keyparts[:2])
name = name.replace('.pub', '')
lines.append(
('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
'no-port-forwarding {key} {name}').format(
pypaas_cmd=os.path.join(
os.path.dirname(sys.executable), 'pypaas'
),
key=key,
name=name
)
)
util.replace_file(os.path.join(ssh_dir, 'authorized_keys'),
'\n'.join(lines)+'\n')
|
import os.path
import sys
from . import util
class SSHKey(object):
@staticmethod
def rebuild_authorized_keys():
lines = []
ssh_dir = os.path.expanduser('~/.ssh')
util.mkdir_p(os.path.join(ssh_dir, 'authorized_keys.d'))
for name in os.listdir(os.path.join(ssh_dir, 'authorized_keys.d')):
- key = open(os.path.join(ssh_dir, 'authorized_keys.d', name)).read()
? ----- --------
+ keyfilename = os.path.join(ssh_dir, 'authorized_keys.d', name)
? ++++++++
+ with open(keyfilename) as keyfile:
+ for key in keyfile:
- keyparts = key.split()
+ keyparts = key.split()
? ++++++++
- assert keyparts[0].startswith('ssh-')
+ assert keyparts[0].startswith('ssh-')
? ++++++++
- key = ' '.join(keyparts[:2])
+ key = ' '.join(keyparts[:2])
? ++++++++
- name = name.replace('.pub', '')
+ name = name.replace('.pub', '')
? ++++++++
- lines.append(
+ lines.append(
? ++++++++
- ('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
+ ('command="{pypaas_cmd} $SSH_ORIGINAL_COMMAND",' +
? ++++++++
- 'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
+ 'no-agent-forwarding,no-user-rc,no-X11-forwarding,' +
? ++++++++
- 'no-port-forwarding {key} {name}').format(
+ 'no-port-forwarding {key} {name}').format(
? ++++++++
- pypaas_cmd=os.path.join(
+ pypaas_cmd=os.path.join(
? ++++++++
- os.path.dirname(sys.executable), 'pypaas'
+ os.path.dirname(sys.executable), 'pypaas'
? ++++++++
+ ),
+ key=key,
+ name=name
+ )
- ),
? -
+ )
- key=key,
- name=name
- )
- )
util.replace_file(os.path.join(ssh_dir, 'authorized_keys'),
'\n'.join(lines)+'\n')
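What the loop change buys: one file under authorized_keys.d can now carry several public keys, one per line, and each line is parsed the same way as before. A stand-alone sketch of that parsing; the key blobs are truncated placeholders, not real key material:
import io

keyfile = io.StringIO(
    'ssh-ed25519 AAAAC3Nza...key1 alice@laptop\n'
    'ssh-rsa AAAAB3Nza...key2 alice@desktop\n'
)

for line in keyfile:
    keyparts = line.split()
    assert keyparts[0].startswith('ssh-')
    key = ' '.join(keyparts[:2])   # keep type + blob, drop the trailing comment
    print(key)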
|
0d59cf159d9c5f6c64c49cc7ef3cef8feaf5452d
|
templatetags/coltrane.py
|
templatetags/coltrane.py
|
from django.db.models import get_model
from django import template
from django.contrib.comments.models import Comment, FreeComment
from coltrane.models import Entry, Link
register = template.Library()
class LatestFeaturedNode(template.Node):
def __init__(self, varname):
self.varname = varname
def render(self, context):
context[self.varname] = Entry.live.latest_featured()
return ''
def do_latest_featured(parser, token):
"""
Retrieves the latest featured Entry and stores it in a specified
context variable.
Syntax::
{% get_latest_featured_entry as [varname] %}
Example::
{% get_latest_featured_entry as latest_featured_entry %}
"""
bits = token.contents.split()
if len(bits) != 3:
raise template.TemplateSyntaxError("'%s' tag takes two arguments" % bits[0])
if bits[1] != 'as':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % bits[0])
return LatestFeaturedNode(bits[2])
register.tag('get_latest_featured_entry', do_latest_featured)
|
from django.db.models import get_model
from django import template
from django.contrib.comments.models import Comment, FreeComment
from template_utils.templatetags.generic_content import GenericContentNode
from coltrane.models import Entry, Link
register = template.Library()
class LatestFeaturedNode(GenericContentNode):
def _get_query_set(self):
if self._queryset is not None:
self._queryset = self._queryset.filter(featured__exact=True)
def do_featured_entries(parser, token):
"""
Retrieves the latest ``num`` featured entries and stores them in a
specified context variable.
Syntax::
{% get_featured_entries [num] as [varname] %}
Example::
{% get_featured_entries 5 as featured_entries %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes three arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % bits[0])
return LatestFeaturedNode('coltrane.entry', bits[1], bits[3])
def do_featured_entry(parser, token):
"""
Retrieves the latest featured Entry and stores it in a specified
context variable.
Syntax::
{% get_featured_entry as [varname] %}
Example::
{% get_featured_entry as featured_entry %}
"""
bits = token.contents.split()
if len(bits) != 3:
raise template.TemplateSyntaxError("'%s' tag takes two arguments" % bits[0])
if bits[1] != 'as':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % bits[0])
return LatestFeaturedNode('coltrane.entry', 1, bits[2])
register.tag('get_featured_entries', do_featured_entries)
register.tag('get_featured_entry', do_featured_entry)
|
Refactor LatestFeaturedNode to use GenericContentNode and accept a configurable number of entries to fetch
|
Refactor LatestFeaturedNode to use GenericContentNode and accept a configurable number of entries to fetch
git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@54 5f8205a5-902a-0410-8b63-8f478ce83d95
|
Python
|
bsd-3-clause
|
clones/django-coltrane,mafix/coltrane-blog
|
from django.db.models import get_model
from django import template
from django.contrib.comments.models import Comment, FreeComment
+ from template_utils.templatetags.generic_content import GenericContentNode
+
from coltrane.models import Entry, Link
register = template.Library()
- class LatestFeaturedNode(template.Node):
+ class LatestFeaturedNode(GenericContentNode):
+ def _get_query_set(self):
+ if self._queryset is not None:
+ self._queryset = self._queryset.filter(featured__exact=True)
- def __init__(self, varname):
- self.varname = varname
-
- def render(self, context):
- context[self.varname] = Entry.live.latest_featured()
- return ''
+ def do_featured_entries(parser, token):
+ """
+ Retrieves the latest ``num`` featured entries and stores them in a
+ specified context variable.
+
+ Syntax::
+
+ {% get_featured_entries [num] as [varname] %}
+
+ Example::
+
+ {% get_featured_entries 5 as featured_entries %}
+
+ """
+ bits = token.contents.split()
+ if len(bits) != 4:
+ raise template.TemplateSyntaxError("'%s' tag takes three arguments" % bits[0])
+ if bits[2] != 'as':
+ raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % bits[0])
+ return LatestFeaturedNode('coltrane.entry', bits[1], bits[3])
+
- def do_latest_featured(parser, token):
+ def do_featured_entry(parser, token):
"""
Retrieves the latest featured Entry and stores it in a specified
context variable.
Syntax::
- {% get_latest_featured_entry as [varname] %}
+ {% get_featured_entry as [varname] %}
Example::
- {% get_latest_featured_entry as latest_featured_entry %}
+ {% get_featured_entry as featured_entry %}
"""
bits = token.contents.split()
if len(bits) != 3:
raise template.TemplateSyntaxError("'%s' tag takes two arguments" % bits[0])
if bits[1] != 'as':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % bits[0])
- return LatestFeaturedNode(bits[2])
+ return LatestFeaturedNode('coltrane.entry', 1, bits[2])
+ register.tag('get_featured_entries', do_featured_entries)
- register.tag('get_latest_featured_entry', do_latest_featured)
+ register.tag('get_featured_entry', do_featured_entry)
|
Refactor LatestFeaturedNode to use GenericContentNode and accept a configurable number of entries to fetch
|
## Code Before:
from django.db.models import get_model
from django import template
from django.contrib.comments.models import Comment, FreeComment
from coltrane.models import Entry, Link
register = template.Library()
class LatestFeaturedNode(template.Node):
def __init__(self, varname):
self.varname = varname
def render(self, context):
context[self.varname] = Entry.live.latest_featured()
return ''
def do_latest_featured(parser, token):
"""
Retrieves the latest featured Entry and stores it in a specified
context variable.
Syntax::
{% get_latest_featured_entry as [varname] %}
Example::
{% get_latest_featured_entry as latest_featured_entry %}
"""
bits = token.contents.split()
if len(bits) != 3:
raise template.TemplateSyntaxError("'%s' tag takes two arguments" % bits[0])
if bits[1] != 'as':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % bits[0])
return LatestFeaturedNode(bits[2])
register.tag('get_latest_featured_entry', do_latest_featured)
## Instruction:
Refactor LatestFeaturedNode to use GenericContentNode and accept a configurable number of entries to fetch
## Code After:
from django.db.models import get_model
from django import template
from django.contrib.comments.models import Comment, FreeComment
from template_utils.templatetags.generic_content import GenericContentNode
from coltrane.models import Entry, Link
register = template.Library()
class LatestFeaturedNode(GenericContentNode):
def _get_query_set(self):
if self._queryset is not None:
self._queryset = self._queryset.filter(featured__exact=True)
def do_featured_entries(parser, token):
"""
Retrieves the latest ``num`` featured entries and stores them in a
specified context variable.
Syntax::
{% get_featured_entries [num] as [varname] %}
Example::
{% get_featured_entries 5 as featured_entries %}
"""
bits = token.contents.split()
if len(bits) != 4:
raise template.TemplateSyntaxError("'%s' tag takes three arguments" % bits[0])
if bits[2] != 'as':
raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % bits[0])
return LatestFeaturedNode('coltrane.entry', bits[1], bits[3])
def do_featured_entry(parser, token):
"""
Retrieves the latest featured Entry and stores it in a specified
context variable.
Syntax::
{% get_featured_entry as [varname] %}
Example::
{% get_featured_entry as featured_entry %}
"""
bits = token.contents.split()
if len(bits) != 3:
raise template.TemplateSyntaxError("'%s' tag takes two arguments" % bits[0])
if bits[1] != 'as':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % bits[0])
return LatestFeaturedNode('coltrane.entry', 1, bits[2])
register.tag('get_featured_entries', do_featured_entries)
register.tag('get_featured_entry', do_featured_entry)
|
from django.db.models import get_model
from django import template
from django.contrib.comments.models import Comment, FreeComment
+ from template_utils.templatetags.generic_content import GenericContentNode
+
from coltrane.models import Entry, Link
register = template.Library()
- class LatestFeaturedNode(template.Node):
? ^^^^ --
+ class LatestFeaturedNode(GenericContentNode):
? ++++++++++ ^
+ def _get_query_set(self):
+ if self._queryset is not None:
+ self._queryset = self._queryset.filter(featured__exact=True)
- def __init__(self, varname):
- self.varname = varname
-
- def render(self, context):
- context[self.varname] = Entry.live.latest_featured()
- return ''
+ def do_featured_entries(parser, token):
+ """
+ Retrieves the latest ``num`` featured entries and stores them in a
+ specified context variable.
+
+ Syntax::
+
+ {% get_featured_entries [num] as [varname] %}
+
+ Example::
+
+ {% get_featured_entries 5 as featured_entries %}
+
+ """
+ bits = token.contents.split()
+ if len(bits) != 4:
+ raise template.TemplateSyntaxError("'%s' tag takes three arguments" % bits[0])
+ if bits[2] != 'as':
+ raise template.TemplateSyntaxError("second argument to '%s' tag must be 'as'" % bits[0])
+ return LatestFeaturedNode('coltrane.entry', bits[1], bits[3])
+
- def do_latest_featured(parser, token):
? -------
+ def do_featured_entry(parser, token):
? ++++++
"""
Retrieves the latest featured Entry and stores it in a specified
context variable.
Syntax::
- {% get_latest_featured_entry as [varname] %}
? -------
+ {% get_featured_entry as [varname] %}
Example::
- {% get_latest_featured_entry as latest_featured_entry %}
? ------- -------
+ {% get_featured_entry as featured_entry %}
"""
bits = token.contents.split()
if len(bits) != 3:
raise template.TemplateSyntaxError("'%s' tag takes two arguments" % bits[0])
if bits[1] != 'as':
raise template.TemplateSyntaxError("first argument to '%s' tag must be 'as'" % bits[0])
- return LatestFeaturedNode(bits[2])
+ return LatestFeaturedNode('coltrane.entry', 1, bits[2])
? +++++++++++++++++++++
+ register.tag('get_featured_entries', do_featured_entries)
- register.tag('get_latest_featured_entry', do_latest_featured)
? ------- -------
+ register.tag('get_featured_entry', do_featured_entry)
? ++++++
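A hypothetical sketch of the GenericContentNode contract the refactor leans on: the real base class ships with django-template-utils, so every name and detail below is an assumption rather than that package's actual code. The idea is to resolve an 'app.model' string, keep a queryset that subclasses may narrow (here, featured entries only), and drop the latest num rows into the context.
from django import template
from django.db.models import get_model


class GenericContentNode(template.Node):
    def __init__(self, model_path, num, varname):
        self.num, self.varname = int(num), varname
        model = get_model(*model_path.split('.'))
        # _queryset stays None when the model path cannot be resolved,
        # which is why the subclass hook above guards against None first.
        self._queryset = model._default_manager.all() if model else None
        self._get_query_set()

    def _get_query_set(self):
        pass   # hook: LatestFeaturedNode adds .filter(featured__exact=True)

    def render(self, context):
        if self._queryset is not None:
            qs = self._queryset[:self.num]
            # num == 1 stores the object itself ({% get_featured_entry %});
            # otherwise the truncated queryset ({% get_featured_entries 5 %}).
            context[self.varname] = qs[0] if self.num == 1 else qs
        return ''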
|
f9a8e5107cc3f9d94f43bd5ce60054f849be2c15
|
tests/utils.py
|
tests/utils.py
|
import copy
import os
from django.conf import settings
from django.template import Context
from django.test import override_settings
HERE = os.path.dirname(__file__)
def template_path(path):
return os.path.join(HERE, 'templates', path, '')
def template_dirs(*relative_dirs):
"""
Convenient decorator to specify the template path.
"""
# copy the original setting
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
for tpl_cfg in TEMPLATES:
tpl_cfg['DIRS'] = [template_path(rel_dir) for rel_dir in relative_dirs]
return override_settings(TEMPLATES=TEMPLATES)
class TemplateTestMixin(object):
def setUp(self):
self.ctx = Context()
def assertNotInHTML(self, needle, haystack, msg_prefix=''):
self.assertInHTML(needle, haystack, count=0, msg_prefix=msg_prefix)
|
import copy
import os
from django.conf import settings
from django.test import override_settings
HERE = os.path.dirname(__file__)
def template_path(path):
return os.path.join(HERE, 'templates', path, '')
def template_dirs(*relative_dirs):
"""
Convenient decorator to specify the template path.
"""
# copy the original setting
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
for tpl_cfg in TEMPLATES:
tpl_cfg['DIRS'] = [template_path(rel_dir) for rel_dir in relative_dirs]
return override_settings(TEMPLATES=TEMPLATES)
class TemplateTestMixin(object):
def setUp(self):
self.ctx = {}
def assertNotInHTML(self, needle, haystack, msg_prefix=''):
self.assertInHTML(needle, haystack, count=0, msg_prefix=msg_prefix)
|
Fix use of Context for dj1.11
|
Fix use of Context for dj1.11
|
Python
|
mit
|
funkybob/django-sniplates,funkybob/django-sniplates
|
import copy
import os
from django.conf import settings
- from django.template import Context
from django.test import override_settings
HERE = os.path.dirname(__file__)
def template_path(path):
return os.path.join(HERE, 'templates', path, '')
def template_dirs(*relative_dirs):
"""
Convenient decorator to specify the template path.
"""
# copy the original setting
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
for tpl_cfg in TEMPLATES:
tpl_cfg['DIRS'] = [template_path(rel_dir) for rel_dir in relative_dirs]
return override_settings(TEMPLATES=TEMPLATES)
class TemplateTestMixin(object):
def setUp(self):
- self.ctx = Context()
+ self.ctx = {}
def assertNotInHTML(self, needle, haystack, msg_prefix=''):
self.assertInHTML(needle, haystack, count=0, msg_prefix=msg_prefix)
|
Fix use of Context for dj1.11
|
## Code Before:
import copy
import os
from django.conf import settings
from django.template import Context
from django.test import override_settings
HERE = os.path.dirname(__file__)
def template_path(path):
return os.path.join(HERE, 'templates', path, '')
def template_dirs(*relative_dirs):
"""
Convenient decorator to specify the template path.
"""
# copy the original setting
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
for tpl_cfg in TEMPLATES:
tpl_cfg['DIRS'] = [template_path(rel_dir) for rel_dir in relative_dirs]
return override_settings(TEMPLATES=TEMPLATES)
class TemplateTestMixin(object):
def setUp(self):
self.ctx = Context()
def assertNotInHTML(self, needle, haystack, msg_prefix=''):
self.assertInHTML(needle, haystack, count=0, msg_prefix=msg_prefix)
## Instruction:
Fix use of Context for dj1.11
## Code After:
import copy
import os
from django.conf import settings
from django.test import override_settings
HERE = os.path.dirname(__file__)
def template_path(path):
return os.path.join(HERE, 'templates', path, '')
def template_dirs(*relative_dirs):
"""
Convenient decorator to specify the template path.
"""
# copy the original setting
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
for tpl_cfg in TEMPLATES:
tpl_cfg['DIRS'] = [template_path(rel_dir) for rel_dir in relative_dirs]
return override_settings(TEMPLATES=TEMPLATES)
class TemplateTestMixin(object):
def setUp(self):
self.ctx = {}
def assertNotInHTML(self, needle, haystack, msg_prefix=''):
self.assertInHTML(needle, haystack, count=0, msg_prefix=msg_prefix)
|
import copy
import os
from django.conf import settings
- from django.template import Context
from django.test import override_settings
HERE = os.path.dirname(__file__)
def template_path(path):
return os.path.join(HERE, 'templates', path, '')
def template_dirs(*relative_dirs):
"""
Convenient decorator to specify the template path.
"""
# copy the original setting
TEMPLATES = copy.deepcopy(settings.TEMPLATES)
for tpl_cfg in TEMPLATES:
tpl_cfg['DIRS'] = [template_path(rel_dir) for rel_dir in relative_dirs]
return override_settings(TEMPLATES=TEMPLATES)
class TemplateTestMixin(object):
def setUp(self):
- self.ctx = Context()
? ^^^^^^^^^
+ self.ctx = {}
? ^^
def assertNotInHTML(self, needle, haystack, msg_prefix=''):
self.assertInHTML(needle, haystack, count=0, msg_prefix=msg_prefix)
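The Django 1.11 behaviour behind this fix, for reference: templates obtained through the engine-backend API take a plain dict and reject a Context instance. A short sketch, assuming a configured Django settings module with a 'django' backend in TEMPLATES:
from django.template import Context, engines

tpl = engines['django'].from_string('Hello {{ name }}')
print(tpl.render({'name': 'world'}))        # backend templates accept dicts

try:
    tpl.render(Context({'name': 'world'}))  # TypeError on modern Django
except TypeError as exc:
    print('rejected:', exc)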
|
008f0a2b0a7823e619410c5af70061d093c6f3de
|
timeseries.py
|
timeseries.py
|
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
x.append(obj['timestamp'])
autoscale(True, 'both')
plot(x, y, label = y_key)
legend(loc='upper left')
show()
if __name__ == "__main__":
main()
|
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
parser.add_argument("-x",
help = "the key to use for the function being plotted",
default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
if x_key is None:
x.append(obj['timestamp'])
if obj['name'] == x_key:
x.append(obj['value'])
autoscale(True, 'both')
xlabel(x_key or 'timestamp')
ylabel(y_key)
plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
|
Allow plotting two types against one another.
|
Allow plotting two types against one another.
|
Python
|
bsd-3-clause
|
openxc/openxc-data-tools
|
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
+ parser.add_argument("-x",
+ help = "the key to use for the function being plotted",
+ default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
+ x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
+ if x_key is None:
- x.append(obj['timestamp'])
+ x.append(obj['timestamp'])
+ if obj['name'] == x_key:
+ x.append(obj['value'])
autoscale(True, 'both')
- plot(x, y, label = y_key)
- legend(loc='upper left')
+ xlabel(x_key or 'timestamp')
+ ylabel(y_key)
+ plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
|
Allow plotting two types against one another.
|
## Code Before:
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
x.append(obj['timestamp'])
autoscale(True, 'both')
plot(x, y, label = y_key)
legend(loc='upper left')
show()
if __name__ == "__main__":
main()
## Instruction:
Allow plotting two types against one another.
## Code After:
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
parser.add_argument("-x",
help = "the key to use for the function being plotted",
default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
if x_key is None:
x.append(obj['timestamp'])
if obj['name'] == x_key:
x.append(obj['value'])
autoscale(True, 'both')
xlabel(x_key or 'timestamp')
ylabel(y_key)
plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
|
import json
import sys
import argparse
from pylab import *
def main():
#Set up the command line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("input_file",
help = "name of the input file")
parser.add_argument("-y",
help = "the key to use for the function being plotted")
+ parser.add_argument("-x",
+ help = "the key to use for the function being plotted",
+ default=None)
args = parser.parse_args()
input_file_name = args.input_file
y_key = str(args.y)
+ x_key = args.x
#initialize the x axis and function to be plotted
x = []
y = []
with open(input_file_name, 'r') as input_file:
for line in input_file:
if not line.rstrip() : continue
try:
obj = json.loads(line)
except ValueError:
print("Skipping invalid JSON: %s" % line)
continue
#if the parsed JSON object has the key we're looking for,
#add the key's value to the y graph and the timestamp
#to the x list
if obj['name'] == y_key:
y.append(obj['value'])
+ if x_key is None:
- x.append(obj['timestamp'])
+ x.append(obj['timestamp'])
? ++++
+ if obj['name'] == x_key:
+ x.append(obj['value'])
autoscale(True, 'both')
- plot(x, y, label = y_key)
- legend(loc='upper left')
+ xlabel(x_key or 'timestamp')
+ ylabel(y_key)
+ plot(x, y, 'ro')
show()
if __name__ == "__main__":
main()
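End-to-end, the script expects newline-delimited JSON records with name/value/timestamp fields. A hedged sketch that writes a small sample trace and shows both invocation styles; the file name and signal names are made up for illustration:
import json

records = (
    [{'name': 'vehicle_speed', 'value': 10 * i, 'timestamp': 1000 + i}
     for i in range(5)]
    + [{'name': 'engine_speed', 'value': 500 * i, 'timestamp': 1000 + i}
       for i in range(5)]
)

with open('trace.json', 'w') as f:
    for rec in records:
        f.write(json.dumps(rec) + '\n')

# python timeseries.py trace.json -y vehicle_speed
#   -> vehicle_speed over its timestamps (default x axis)
# python timeseries.py trace.json -y vehicle_speed -x engine_speed
#   -> vehicle_speed against engine_speed, paired up in arrival order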
|
1bda3fa8b3bffaca38b26191602e74d0afeaad19
|
app/views.py
|
app/views.py
|
from flask import render_template, request, Blueprint
import json
from app.state import state
index = Blueprint('index', __name__, template_folder='templates')
@index.route('/', methods=['POST', 'GET'])
def show():
if request.method == 'GET':
return render_template('index.html',
program = 'ascii_text',
brightness = 0.5,
rotation = 0,
text = 'Hello, World!',
)
elif request.method == 'POST':
params = {}
if 'brightness' in request.form:
params['brightness'] = request.form['brightness']
if 'rotation' in request.form:
params['rotation'] = request.form['rotation']
if 'text' in request.form:
params['text'] = request.form['text']
if request.form['submit'] == 'Run':
state.start_program(request.form['program'], params)
elif request.form['submit'] == 'Stop':
state.stop_program()
return render_template('index.html',
program = request.form['program'],
brightness = request.form['brightness'],
rotation = request.form['rotation'],
text = request.form['text'],
)
|
from flask import render_template, request, Blueprint
import json
from app.state import state
index = Blueprint('index', __name__, template_folder='templates')
@index.route('/', methods=['GET'])
def show():
if request.method == 'GET':
return render_template('index.html')
|
Remove old web ui backend
|
Remove old web ui backend
|
Python
|
mit
|
njbbaer/unicorn-remote,njbbaer/unicorn-remote,njbbaer/unicorn-remote
|
from flask import render_template, request, Blueprint
import json
from app.state import state
index = Blueprint('index', __name__, template_folder='templates')
- @index.route('/', methods=['POST', 'GET'])
+ @index.route('/', methods=['GET'])
def show():
-
+
if request.method == 'GET':
- return render_template('index.html',
+ return render_template('index.html')
- program = 'ascii_text',
- brightness = 0.5,
- rotation = 0,
- text = 'Hello, World!',
- )
-
- elif request.method == 'POST':
- params = {}
-
- if 'brightness' in request.form:
- params['brightness'] = request.form['brightness']
-
- if 'rotation' in request.form:
- params['rotation'] = request.form['rotation']
-
- if 'text' in request.form:
- params['text'] = request.form['text']
-
- if request.form['submit'] == 'Run':
- state.start_program(request.form['program'], params)
- elif request.form['submit'] == 'Stop':
- state.stop_program()
-
- return render_template('index.html',
- program = request.form['program'],
- brightness = request.form['brightness'],
- rotation = request.form['rotation'],
- text = request.form['text'],
- )
|
Remove old web ui backend
|
## Code Before:
from flask import render_template, request, Blueprint
import json
from app.state import state
index = Blueprint('index', __name__, template_folder='templates')
@index.route('/', methods=['POST', 'GET'])
def show():
if request.method == 'GET':
return render_template('index.html',
program = 'ascii_text',
brightness = 0.5,
rotation = 0,
text = 'Hello, World!',
)
elif request.method == 'POST':
params = {}
if 'brightness' in request.form:
params['brightness'] = request.form['brightness']
if 'rotation' in request.form:
params['rotation'] = request.form['rotation']
if 'text' in request.form:
params['text'] = request.form['text']
if request.form['submit'] == 'Run':
state.start_program(request.form['program'], params)
elif request.form['submit'] == 'Stop':
state.stop_program()
return render_template('index.html',
program = request.form['program'],
brightness = request.form['brightness'],
rotation = request.form['rotation'],
text = request.form['text'],
)
## Instruction:
Remove old web ui backend
## Code After:
from flask import render_template, request, Blueprint
import json
from app.state import state
index = Blueprint('index', __name__, template_folder='templates')
@index.route('/', methods=['GET'])
def show():
if request.method == 'GET':
return render_template('index.html')
|
from flask import render_template, request, Blueprint
import json
from app.state import state
index = Blueprint('index', __name__, template_folder='templates')
- @index.route('/', methods=['POST', 'GET'])
? --------
+ @index.route('/', methods=['GET'])
def show():
-
+
if request.method == 'GET':
- return render_template('index.html',
? ^^
+ return render_template('index.html')
? + ^
- program = 'ascii_text',
- brightness = 0.5,
- rotation = 0,
- text = 'Hello, World!',
- )
-
- elif request.method == 'POST':
- params = {}
-
- if 'brightness' in request.form:
- params['brightness'] = request.form['brightness']
-
- if 'rotation' in request.form:
- params['rotation'] = request.form['rotation']
-
- if 'text' in request.form:
- params['text'] = request.form['text']
-
- if request.form['submit'] == 'Run':
- state.start_program(request.form['program'], params)
- elif request.form['submit'] == 'Stop':
- state.stop_program()
-
- return render_template('index.html',
- program = request.form['program'],
- brightness = request.form['brightness'],
- rotation = request.form['rotation'],
- text = request.form['text'],
- )
|
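With the POST branch gone, Flask's default of GET-only routes makes both the explicit `methods` list and the `request.method` check redundant, and the `json`/`state` imports are now unused. A minimal sketch of the fully reduced view, assuming the same blueprint layout:

from flask import render_template, Blueprint

index = Blueprint('index', __name__, template_folder='templates')

@index.route('/')  # GET is Flask's default method
def show():
    # No form handling remains, so render the template unconditionally.
    return render_template('index.html')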
f269b7b5c7b0bb0973f504db1c3605c9ff3ac6da
|
tutorials/2017/thursday/graphColouring.py
|
tutorials/2017/thursday/graphColouring.py
|
from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
|
from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
def SAT2graphColouring(sol):
d = {i: j for (i, j), v in sol.items() if v}
out = [None] * len(d)
for i, j in d.items():
out[i] = j
return out
|
Add function to extract solution from valuation
|
Add function to extract solution from valuation
|
Python
|
mit
|
jaanos/LVR-2016,jaanos/LVR-2016
|
from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
+ def SAT2graphColouring(sol):
+ d = {i: j for (i, j), v in sol.items() if v}
+ out = [None] * len(d)
+ for i, j in d.items():
+ out[i] = j
+ return out
+
|
Add function to extract solution from valuation
|
## Code Before:
from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
## Instruction:
Add function to extract solution from valuation
## Code After:
from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
def SAT2graphColouring(sol):
d = {i: j for (i, j), v in sol.items() if v}
out = [None] * len(d)
for i, j in d.items():
out[i] = j
return out
|
from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
+
+ def SAT2graphColouring(sol):
+ d = {i: j for (i, j), v in sol.items() if v}
+ out = [None] * len(d)
+ for i, j in d.items():
+ out[i] = j
+ return out
|
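`SAT2graphColouring` expects a valuation mapping (vertex, colour) pairs to booleans and keeps only the true assignments. A round-trip sketch, run in the module's own context; the hand-written `sol` stands in for the output of whatever SAT solver consumes the formula (no solver is part of this module):

# Triangle graph as adjacency lists: vertex i lists its neighbours.
G = [[1, 2], [0, 2], [0, 1]]
formula = graphColouring2SAT(G, 3)

# A satisfying valuation such a solver might return: one true
# (vertex, colour) pair per vertex, everything else false.
sol = {(0, 0): True, (0, 1): False, (0, 2): False,
       (1, 0): False, (1, 1): True, (1, 2): False,
       (2, 0): False, (2, 1): False, (2, 2): True}

print(SAT2graphColouring(sol))  # [0, 1, 2] -- one colour index per vertex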
dcc810f3181ebe358481c30c2248d25511aab26c
|
npz_to_my5c.py
|
npz_to_my5c.py
|
import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t')
|
import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t', na_rep="NaN")
|
Handle NaNs properly when flattening matrices.
|
Handle NaNs properly when flattening matrices.
|
Python
|
apache-2.0
|
pombo-lab/gamtools,pombo-lab/gamtools
|
import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
- pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t')
+ pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t', na_rep="NaN")
|
Handle NaNs properly when flattening matrices.
|
## Code Before:
import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t')
## Instruction:
Handle NaNs properly when flattening matrices.
## Code After:
import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t', na_rep="NaN")
|
import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
- pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t')
+ pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t', na_rep="NaN")
? ++++++++++++++
|
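By default `to_csv` writes missing values as empty fields, which a downstream my5c parser can misread; `na_rep="NaN"` makes them explicit. A self-contained demonstration of the difference:

import sys
import numpy as np
import pandas as pd

df = pd.DataFrame([[1.0, np.nan], [np.nan, 4.0]],
                  index=['a', 'b'], columns=['a', 'b'])

df.to_csv(sys.stdout, sep='\t')                # NaNs become empty cells
df.to_csv(sys.stdout, sep='\t', na_rep='NaN')  # NaNs written out literally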
d379badd16528d2f7cd3826fcef5bd87be30cccf
|
nightreads/user_manager/user_service.py
|
nightreads/user_manager/user_service.py
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
def get_or_create_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
def get_user(email):
return User.objects.filter(username=email).first()
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
def update_subscription(user, status):
user.subscription.is_subscribed = status
user.save()
|
Rename `get_user` to `get_or_create_user` and add a new `get_user`
|
Rename `get_user` to `get_or_create_user` and add a new `get_user`
|
Python
|
mit
|
avinassh/nightreads,avinassh/nightreads
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
- def get_user(email):
+ def get_or_create_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
+
+
+ def get_user(email):
+ return User.objects.filter(username=email).first()
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
+
+ def update_subscription(user, status):
+ user.subscription.is_subscribed = status
+ user.save()
+
|
Rename `get_user` to `get_or_create_user` and add a new `get_user`
|
## Code Before:
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
def get_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
## Instruction:
Rename `get_user` to `get_or_create_user` and add a new `get_user`
## Code After:
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
def get_or_create_user(email):
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
def get_user(email):
return User.objects.filter(username=email).first()
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
def update_subscription(user, status):
user.subscription.is_subscribed = status
user.save()
|
from django.contrib.auth.models import User
from nightreads.posts.models import Tag
from .models import (Subscription, SubscriptionActivation,
UnsubscriptionActivation)
def update_user_tags(user, tags):
tags_objs = Tag.objects.filter(name__in=tags)
if tags_objs:
user.subscription.tags.clear()
user.subscription.tags.add(*tags_objs)
user.save()
return True
return False
- def get_user(email):
+ def get_or_create_user(email):
? ++++++++++
user, created = User.objects.get_or_create(username=email)
if created:
Subscription.objects.create(user=user)
return user
+
+
+ def get_user(email):
+ return User.objects.filter(username=email).first()
def generate_subscribe_key(user):
subscribe_key = User.objects.make_random_password(length=80)
SubscriptionActivation.objects.update_or_create(
user=user, defaults={'subscribe_key': subscribe_key})
return subscribe_key
def generate_unsubscribe_key(user):
unsubscribe_key = User.objects.make_random_password(length=80)
UnsubscriptionActivation.objects.update_or_create(
user=user, defaults={'unsubscribe_key': unsubscribe_key})
return unsubscribe_key
+
+
+ def update_subscription(user, status):
+ user.subscription.is_subscribed = status
+ user.save()
|
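After the split, flows that must not create accounts (such as unsubscribing) can use the lookup-only helper, which returns None for unknown emails. A usage sketch against these service functions, assuming a configured Django environment; note that `update_subscription` saves the user but not the related subscription object, so persisting the flag presumably depends on how `Subscription` is wired elsewhere:

user = get_user('reader@example.com')  # lookup only, may be None
if user is None:
    user = get_or_create_user('reader@example.com')  # also creates a Subscription

update_subscription(user, status=True)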
8cee7d5478cde2b188da4dc93f844073be729a48
|
src/gerobak/apps/profile/models.py
|
src/gerobak/apps/profile/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from gerobak import utils
class Profile(models.Model):
pid = models.CharField(max_length=8)
user = models.ForeignKey(User)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=50)
desc = models.CharField(max_length=250, null=True, blank=True)
arch = models.CharField(max_length=10, choices=settings.GEROBAK_ARCHS,
default=settings.GEROBAK_DEFAULT_ARCH)
repo_updated = models.DateTimeField(null=True, default=None)
sources_updated = models.DateTimeField(null=True, default=None)
sources_total = models.IntegerField(null=True, default=None)
status_updated = models.DateTimeField(null=True, default=None)
status_hash = models.CharField(max_length=32, null=True, default=None)
status_size = models.IntegerField(default=0)
def is_ready(self):
return self.repo_updated is not None and \
self.status_updated is not None
def generate_pid(self):
import uuid
return str(uuid.uuid4()).split('-')[0]
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from gerobak import utils
class Profile(models.Model):
pid = models.CharField(max_length=8)
user = models.ForeignKey(User)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=50)
desc = models.CharField(max_length=250, null=True, blank=True)
arch = models.CharField(max_length=10, choices=settings.GEROBAK_ARCHS,
default=settings.GEROBAK_DEFAULT_ARCH)
repo_updated = models.DateTimeField(null=True, default=None)
sources_updated = models.DateTimeField(null=True, default=None)
sources_total = models.IntegerField(null=True, default=None)
status_updated = models.DateTimeField(null=True, default=None)
status_hash = models.CharField(max_length=32, null=True, default=None)
status_size = models.IntegerField(default=0)
tid_update = models.CharField(max_length=36, null=True, default=None)
tid_install = models.CharField(max_length=36, null=True, default=None)
tid_upgrade = models.CharField(max_length=36, null=True, default=None)
def is_ready(self):
return self.repo_updated is not None and \
self.status_updated is not None
def generate_pid(self):
import uuid
return str(uuid.uuid4()).split('-')[0]
|
Store task_id for update, install, and upgrade processes in the database.
|
Store task_id for update, install, and upgrade processes in the database.
|
Python
|
agpl-3.0
|
fajran/gerobak,fajran/gerobak
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from gerobak import utils
class Profile(models.Model):
pid = models.CharField(max_length=8)
user = models.ForeignKey(User)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=50)
desc = models.CharField(max_length=250, null=True, blank=True)
arch = models.CharField(max_length=10, choices=settings.GEROBAK_ARCHS,
default=settings.GEROBAK_DEFAULT_ARCH)
repo_updated = models.DateTimeField(null=True, default=None)
sources_updated = models.DateTimeField(null=True, default=None)
sources_total = models.IntegerField(null=True, default=None)
status_updated = models.DateTimeField(null=True, default=None)
status_hash = models.CharField(max_length=32, null=True, default=None)
status_size = models.IntegerField(default=0)
+ tid_update = models.CharField(max_length=36, null=True, default=None)
+ tid_install = models.CharField(max_length=36, null=True, default=None)
+ tid_upgrade = models.CharField(max_length=36, null=True, default=None)
+
def is_ready(self):
return self.repo_updated is not None and \
self.status_updated is not None
def generate_pid(self):
import uuid
return str(uuid.uuid4()).split('-')[0]
|
Store task_id for update, install, and upgrade processes in the database.
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from gerobak import utils
class Profile(models.Model):
pid = models.CharField(max_length=8)
user = models.ForeignKey(User)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=50)
desc = models.CharField(max_length=250, null=True, blank=True)
arch = models.CharField(max_length=10, choices=settings.GEROBAK_ARCHS,
default=settings.GEROBAK_DEFAULT_ARCH)
repo_updated = models.DateTimeField(null=True, default=None)
sources_updated = models.DateTimeField(null=True, default=None)
sources_total = models.IntegerField(null=True, default=None)
status_updated = models.DateTimeField(null=True, default=None)
status_hash = models.CharField(max_length=32, null=True, default=None)
status_size = models.IntegerField(default=0)
def is_ready(self):
return self.repo_updated is not None and \
self.status_updated is not None
def generate_pid(self):
import uuid
return str(uuid.uuid4()).split('-')[0]
## Instruction:
Store task_id for update, install, and upgrade processes in the database.
## Code After:
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from gerobak import utils
class Profile(models.Model):
pid = models.CharField(max_length=8)
user = models.ForeignKey(User)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=50)
desc = models.CharField(max_length=250, null=True, blank=True)
arch = models.CharField(max_length=10, choices=settings.GEROBAK_ARCHS,
default=settings.GEROBAK_DEFAULT_ARCH)
repo_updated = models.DateTimeField(null=True, default=None)
sources_updated = models.DateTimeField(null=True, default=None)
sources_total = models.IntegerField(null=True, default=None)
status_updated = models.DateTimeField(null=True, default=None)
status_hash = models.CharField(max_length=32, null=True, default=None)
status_size = models.IntegerField(default=0)
tid_update = models.CharField(max_length=36, null=True, default=None)
tid_install = models.CharField(max_length=36, null=True, default=None)
tid_upgrade = models.CharField(max_length=36, null=True, default=None)
def is_ready(self):
return self.repo_updated is not None and \
self.status_updated is not None
def generate_pid(self):
import uuid
return str(uuid.uuid4()).split('-')[0]
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.signals import post_save
from gerobak import utils
class Profile(models.Model):
pid = models.CharField(max_length=8)
user = models.ForeignKey(User)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
name = models.CharField(max_length=50)
desc = models.CharField(max_length=250, null=True, blank=True)
arch = models.CharField(max_length=10, choices=settings.GEROBAK_ARCHS,
default=settings.GEROBAK_DEFAULT_ARCH)
repo_updated = models.DateTimeField(null=True, default=None)
sources_updated = models.DateTimeField(null=True, default=None)
sources_total = models.IntegerField(null=True, default=None)
status_updated = models.DateTimeField(null=True, default=None)
status_hash = models.CharField(max_length=32, null=True, default=None)
status_size = models.IntegerField(default=0)
+ tid_update = models.CharField(max_length=36, null=True, default=None)
+ tid_install = models.CharField(max_length=36, null=True, default=None)
+ tid_upgrade = models.CharField(max_length=36, null=True, default=None)
+
def is_ready(self):
return self.repo_updated is not None and \
self.status_updated is not None
def generate_pid(self):
import uuid
return str(uuid.uuid4()).split('-')[0]
|
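The three `tid_*` columns are sized for 36-character UUID strings, which matches the task ids produced by queue libraries such as Celery. A hedged sketch of how one might be populated; the task object here is an assumption, not part of this model:

def start_update(profile, update_task):
    """Kick off an async update and remember its task id.

    update_task is assumed to be a Celery task whose AsyncResult.id
    is a 36-character UUID string, matching the column's max_length.
    """
    result = update_task.delay(profile.pk)
    profile.tid_update = result.id
    profile.save()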
ddf2075228a8c250cf75ec85914801262cb73177
|
zerver/migrations/0032_verify_all_medium_avatar_images.py
|
zerver/migrations/0032_verify_all_medium_avatar_images.py
|
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import upload_backend
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from mock import patch
from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
from zerver.models import UserProfile
from typing import Text
import hashlib
# We hackishly patch this function in order to revert it to the state
# it had when this migration was first written. This is a balance
# between copying in a historical version of hundreds of lines of code
# from zerver.lib.upload (which would pretty annoying, but would be a
# pain) and just using the current version, which doesn't work
# since we rearranged the avatars in Zulip 1.6.
def patched_user_avatar_path(user_profile):
# type: (UserProfile) -> Text
email = user_profile.email
user_key = email.lower() + settings.AVATAR_SALT
return make_safe_digest(user_key, hashlib.sha1)
@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
Make migration 0032 use an old version of user_avatar_path.
|
Make migration 0032 use an old version of user_avatar_path.
This fixes upgrading from very old Zulip servers (e.g. 1.4.3) all the
way to current.
Fixes: #6516.
|
Python
|
apache-2.0
|
hackerkid/zulip,kou/zulip,amanharitsh123/zulip,brockwhittaker/zulip,showell/zulip,hackerkid/zulip,rishig/zulip,verma-varsha/zulip,synicalsyntax/zulip,zulip/zulip,amanharitsh123/zulip,showell/zulip,punchagan/zulip,amanharitsh123/zulip,punchagan/zulip,timabbott/zulip,rht/zulip,tommyip/zulip,eeshangarg/zulip,Galexrt/zulip,eeshangarg/zulip,rishig/zulip,rishig/zulip,Galexrt/zulip,dhcrzf/zulip,rht/zulip,shubhamdhama/zulip,rht/zulip,tommyip/zulip,mahim97/zulip,kou/zulip,kou/zulip,timabbott/zulip,zulip/zulip,brainwane/zulip,verma-varsha/zulip,verma-varsha/zulip,punchagan/zulip,brainwane/zulip,zulip/zulip,eeshangarg/zulip,timabbott/zulip,synicalsyntax/zulip,brockwhittaker/zulip,brockwhittaker/zulip,brainwane/zulip,showell/zulip,brainwane/zulip,brainwane/zulip,rishig/zulip,tommyip/zulip,Galexrt/zulip,tommyip/zulip,synicalsyntax/zulip,zulip/zulip,jackrzhang/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,punchagan/zulip,rht/zulip,kou/zulip,andersk/zulip,kou/zulip,timabbott/zulip,jackrzhang/zulip,synicalsyntax/zulip,eeshangarg/zulip,brockwhittaker/zulip,tommyip/zulip,brainwane/zulip,dhcrzf/zulip,dhcrzf/zulip,rishig/zulip,showell/zulip,showell/zulip,andersk/zulip,hackerkid/zulip,brockwhittaker/zulip,andersk/zulip,showell/zulip,timabbott/zulip,dhcrzf/zulip,mahim97/zulip,jackrzhang/zulip,andersk/zulip,hackerkid/zulip,jackrzhang/zulip,jackrzhang/zulip,amanharitsh123/zulip,shubhamdhama/zulip,amanharitsh123/zulip,mahim97/zulip,kou/zulip,tommyip/zulip,dhcrzf/zulip,shubhamdhama/zulip,rht/zulip,rht/zulip,eeshangarg/zulip,shubhamdhama/zulip,jackrzhang/zulip,shubhamdhama/zulip,zulip/zulip,punchagan/zulip,rishig/zulip,shubhamdhama/zulip,hackerkid/zulip,shubhamdhama/zulip,dhcrzf/zulip,timabbott/zulip,rishig/zulip,punchagan/zulip,zulip/zulip,verma-varsha/zulip,showell/zulip,Galexrt/zulip,mahim97/zulip,Galexrt/zulip,verma-varsha/zulip,punchagan/zulip,hackerkid/zulip,jackrzhang/zulip,Galexrt/zulip,brainwane/zulip,hackerkid/zulip,synicalsyntax/zulip,eeshangarg/zulip,zulip/zulip,dhcrzf/zulip,andersk/zulip,brockwhittaker/zulip,mahim97/zulip,amanharitsh123/zulip,timabbott/zulip,synicalsyntax/zulip,rht/zulip,tommyip/zulip,synicalsyntax/zulip,verma-varsha/zulip,mahim97/zulip,Galexrt/zulip,andersk/zulip
|
from __future__ import unicode_literals
+ from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
+ from mock import patch
+ from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
+ from zerver.models import UserProfile
+ from typing import Text
+ import hashlib
+ # We hackishly patch this function in order to revert it to the state
+ # it had when this migration was first written. This is a balance
+ # between copying in a historical version of hundreds of lines of code
+ # from zerver.lib.upload (which would pretty annoying, but would be a
+ # pain) and just using the current version, which doesn't work
+ # since we rearranged the avatars in Zulip 1.6.
+ def patched_user_avatar_path(user_profile):
+ # type: (UserProfile) -> Text
+ email = user_profile.email
+ user_key = email.lower() + settings.AVATAR_SALT
+ return make_safe_digest(user_key, hashlib.sha1)
+ @patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
Make migration 0032 use an old version of user_avatar_path.
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import upload_backend
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
## Instruction:
Make migration 0032 use an old version of user_avatar_path.
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from mock import patch
from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
from zerver.models import UserProfile
from typing import Text
import hashlib
# We hackishly patch this function in order to revert it to the state
# it had when this migration was first written. This is a balance
# between copying in a historical version of hundreds of lines of code
# from zerver.lib.upload (which would pretty annoying, but would be a
# pain) and just using the current version, which doesn't work
# since we rearranged the avatars in Zulip 1.6.
def patched_user_avatar_path(user_profile):
# type: (UserProfile) -> Text
email = user_profile.email
user_key = email.lower() + settings.AVATAR_SALT
return make_safe_digest(user_key, hashlib.sha1)
@patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
from __future__ import unicode_literals
+ from django.conf import settings
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
+ from mock import patch
+ from zerver.lib.utils import make_safe_digest
from zerver.lib.upload import upload_backend
+ from zerver.models import UserProfile
+ from typing import Text
+ import hashlib
+ # We hackishly patch this function in order to revert it to the state
+ # it had when this migration was first written. This is a balance
+ # between copying in a historical version of hundreds of lines of code
+ # from zerver.lib.upload (which would pretty annoying, but would be a
+ # pain) and just using the current version, which doesn't work
+ # since we rearranged the avatars in Zulip 1.6.
+ def patched_user_avatar_path(user_profile):
+ # type: (UserProfile) -> Text
+ email = user_profile.email
+ user_key = email.lower() + settings.AVATAR_SALT
+ return make_safe_digest(user_key, hashlib.sha1)
+ @patch('zerver.lib.upload.user_avatar_path', patched_user_avatar_path)
def verify_medium_avatar_image(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
user_profile_model = apps.get_model('zerver', 'UserProfile')
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
upload_backend.ensure_medium_avatar_image(user_profile)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0031_remove_system_avatar_source'),
]
operations = [
migrations.RunPython(verify_medium_avatar_image)
]
|
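Because `patch()` here receives an explicit replacement object as its second argument, it swaps the module attribute without injecting an extra mock parameter into the decorated function. A self-contained illustration of that decorator form, using `unittest.mock` (whose `patch` behaves the same as `mock.patch`) and a standard-library target:

import os
from unittest.mock import patch

def fake_cwd():
    return '/legacy/path'

@patch('os.getcwd', fake_cwd)
def show_cwd():
    # os.getcwd resolves to fake_cwd only while this call runs; no
    # mock argument is injected because a replacement was supplied.
    print(os.getcwd())

show_cwd()          # /legacy/path
print(os.getcwd())  # the real working directory is restored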
0148b417a9ce8531383f2fb4e5500d9b32794f2c
|
byceps/services/party/settings_service.py
|
byceps/services/party/settings_service.py
|
from typing import Optional
from ...typing import PartyID
from .models.setting import Setting as DbSetting
from .transfer.models import PartySetting
def find_setting(party_id: PartyID, name: str) -> Optional[PartySetting]:
"""Return the setting for that party and with that name, or `None`
if not found.
"""
setting = DbSetting.query.get((party_id, name))
if setting is None:
return None
return _db_entity_to_party_setting(setting)
def _db_entity_to_party_setting(setting: DbSetting) -> PartySetting:
return PartySetting(
setting.party_id,
setting.name,
setting.value,
)
|
from typing import Optional
from ...database import db
from ...typing import PartyID
from .models.setting import Setting as DbSetting
from .transfer.models import PartySetting
def create_setting(party_id: PartyID, name: str, value: str) -> PartySetting:
"""Create a setting for that party."""
setting = DbSetting(party_id, name, value)
db.session.add(setting)
db.session.commit()
return _db_entity_to_party_setting(setting)
def find_setting(party_id: PartyID, name: str) -> Optional[PartySetting]:
"""Return the setting for that party and with that name, or `None`
if not found.
"""
setting = DbSetting.query.get((party_id, name))
if setting is None:
return None
return _db_entity_to_party_setting(setting)
def find_setting_value(party_id: PartyID, name: str) -> Optional[str]:
"""Return the value of the setting for that party and with that
name, or `None` if not found.
"""
setting = find_setting(party_id, name)
if setting is None:
return None
return setting.value
def _db_entity_to_party_setting(setting: DbSetting) -> PartySetting:
return PartySetting(
setting.party_id,
setting.name,
setting.value,
)
|
Add service functions to create a party setting and to obtain one's value
|
Add service functions to create a party setting and to obtain one's value
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
from typing import Optional
+ from ...database import db
from ...typing import PartyID
from .models.setting import Setting as DbSetting
from .transfer.models import PartySetting
+
+
+ def create_setting(party_id: PartyID, name: str, value: str) -> PartySetting:
+ """Create a setting for that party."""
+ setting = DbSetting(party_id, name, value)
+
+ db.session.add(setting)
+ db.session.commit()
+
+ return _db_entity_to_party_setting(setting)
def find_setting(party_id: PartyID, name: str) -> Optional[PartySetting]:
"""Return the setting for that party and with that name, or `None`
if not found.
"""
setting = DbSetting.query.get((party_id, name))
if setting is None:
return None
return _db_entity_to_party_setting(setting)
+ def find_setting_value(party_id: PartyID, name: str) -> Optional[str]:
+ """Return the value of the setting for that party and with that
+ name, or `None` if not found.
+ """
+ setting = find_setting(party_id, name)
+
+ if setting is None:
+ return None
+
+ return setting.value
+
+
def _db_entity_to_party_setting(setting: DbSetting) -> PartySetting:
return PartySetting(
setting.party_id,
setting.name,
setting.value,
)
|
Add service functions to create a party setting and to obtain one's value
|
## Code Before:
from typing import Optional
from ...typing import PartyID
from .models.setting import Setting as DbSetting
from .transfer.models import PartySetting
def find_setting(party_id: PartyID, name: str) -> Optional[PartySetting]:
"""Return the setting for that party and with that name, or `None`
if not found.
"""
setting = DbSetting.query.get((party_id, name))
if setting is None:
return None
return _db_entity_to_party_setting(setting)
def _db_entity_to_party_setting(setting: DbSetting) -> PartySetting:
return PartySetting(
setting.party_id,
setting.name,
setting.value,
)
## Instruction:
Add service functions to create a party setting and to obtain one's value
## Code After:
from typing import Optional
from ...database import db
from ...typing import PartyID
from .models.setting import Setting as DbSetting
from .transfer.models import PartySetting
def create_setting(party_id: PartyID, name: str, value: str) -> PartySetting:
"""Create a setting for that party."""
setting = DbSetting(party_id, name, value)
db.session.add(setting)
db.session.commit()
return _db_entity_to_party_setting(setting)
def find_setting(party_id: PartyID, name: str) -> Optional[PartySetting]:
"""Return the setting for that party and with that name, or `None`
if not found.
"""
setting = DbSetting.query.get((party_id, name))
if setting is None:
return None
return _db_entity_to_party_setting(setting)
def find_setting_value(party_id: PartyID, name: str) -> Optional[str]:
"""Return the value of the setting for that party and with that
name, or `None` if not found.
"""
setting = find_setting(party_id, name)
if setting is None:
return None
return setting.value
def _db_entity_to_party_setting(setting: DbSetting) -> PartySetting:
return PartySetting(
setting.party_id,
setting.name,
setting.value,
)
|
from typing import Optional
+ from ...database import db
from ...typing import PartyID
from .models.setting import Setting as DbSetting
from .transfer.models import PartySetting
+
+
+ def create_setting(party_id: PartyID, name: str, value: str) -> PartySetting:
+ """Create a setting for that party."""
+ setting = DbSetting(party_id, name, value)
+
+ db.session.add(setting)
+ db.session.commit()
+
+ return _db_entity_to_party_setting(setting)
def find_setting(party_id: PartyID, name: str) -> Optional[PartySetting]:
"""Return the setting for that party and with that name, or `None`
if not found.
"""
setting = DbSetting.query.get((party_id, name))
if setting is None:
return None
return _db_entity_to_party_setting(setting)
+ def find_setting_value(party_id: PartyID, name: str) -> Optional[str]:
+ """Return the value of the setting for that party and with that
+ name, or `None` if not found.
+ """
+ setting = find_setting(party_id, name)
+
+ if setting is None:
+ return None
+
+ return setting.value
+
+
def _db_entity_to_party_setting(setting: DbSetting) -> PartySetting:
return PartySetting(
setting.party_id,
setting.name,
setting.value,
)
|
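The two helpers form a create/read pair over the settings table, with `find_setting_value` layering on `find_setting` so callers collapse the not-found case into a single None check. A usage sketch, where `party_id` stands in for a real PartyID and an application context with the database is assumed:

create_setting(party_id, 'ticket_limit', '500')

limit = find_setting_value(party_id, 'ticket_limit')    # '500'
missing = find_setting_value(party_id, 'no_such_key')   # None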
2bab1888b43a9c232b37cc26c37df992ea5df2c5
|
project/apps/api/signals.py
|
project/apps/api/signals.py
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
|
Create sentinel rounds on Session creation
|
Create sentinel rounds on Session creation
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
+
+
+ @receiver(post_save, sender=Session)
+ def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
+ """Create sentinels."""
+ if not raw:
+ if created:
+ i = 1
+ while i <= instance.num_rounds:
+ instance.rounds.create(
+ num=i,
+ kind=(instance.num_rounds - i) + 1,
+ )
+ i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
-
- @receiver(post_save, sender=Session)
- def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
- """Create sentinels."""
- if not raw:
- if created:
- i = 1
- while i <= instance.num_rounds:
- instance.rounds.create(
- num=i,
- kind=(instance.num_rounds - i) + 1,
- )
- i += 1
-
|
Create sentinel rounds on Session creation
|
## Code Before:
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
## Instruction:
Create sentinel rounds on Session creation
## Code After:
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
+
+
+ @receiver(post_save, sender=Session)
+ def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
+ """Create sentinels."""
+ if not raw:
+ if created:
+ i = 1
+ while i <= instance.num_rounds:
+ instance.rounds.create(
+ num=i,
+ kind=(instance.num_rounds - i) + 1,
+ )
+ i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
-
-
- @receiver(post_save, sender=Session)
- def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
- """Create sentinels."""
- if not raw:
- if created:
- i = 1
- while i <= instance.num_rounds:
- instance.rounds.create(
- num=i,
- kind=(instance.num_rounds - i) + 1,
- )
- i += 1
|
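Both receivers share the same guard: do nothing for fixture loads (Django's loaddata dispatches post_save with raw=True) and for plain updates of existing rows, so sentinel children are created exactly once per new object. A minimal sketch of that guard pattern, assuming the same signal and model imports as this module:

@receiver(post_save, sender=Session)
def on_session_saved(sender, instance=None, created=False, raw=False, **kwargs):
    if raw or not created:
        return  # skip fixture loads and ordinary updates
    # ... create dependent sentinel rows exactly once here ...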
433d9b2c1c29f32a7d5289e84673308c96302d8d
|
controlers/access.py
|
controlers/access.py
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
FIX a bug, you fuck'in forgot to rename the new function
|
FIX a bug, you fuck'in forgot to rename the new function
|
Python
|
agpl-3.0
|
cardmaster/makeclub,cardmaster/makeclub,cardmaster/makeclub
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
- "finish"
+ "finish",
+ "newact"
]
actOperatoins = [
- "create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
- def hasClubPrivilige (user, act, operation):
+ def hasActPrivilige (user, act, operation):
return True
|
FIX a bug, you fuck'in forgot to rename the new function
|
## Code Before:
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish"
]
actOperatoins = [
"create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasClubPrivilige (user, act, operation):
return True
## Instruction:
FIX a bug, you fuck'in forgot to rename the new function
## Code After:
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
"finish",
"newact"
]
actOperatoins = [
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
def hasActPrivilige (user, act, operation):
return True
|
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api import users
operations = [
"listclubs",
"createClub"
]
clubOperations = [
"view",
"create",
"edit",
"delete",
"arrange",
- "finish"
+ "finish",
? +
+ "newact"
]
actOperatoins = [
- "create",
"view",
"edit",
"join"
]
def isAccessible (user, operation):
return True
def hasClubPrivilige (user, club, operation):
return True
- def hasClubPrivilige (user, act, operation):
? ^^^^
+ def hasActPrivilige (user, act, operation):
? ^^^
return True
|
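The underlying mistake: Python executes def statements in order, so the second `hasClubPrivilige` silently rebound the name — the club-level check was shadowed, and no `hasActPrivilige` existed for callers until the rename. A small demonstration of that shadowing:

def greet(name):
    return 'hello ' + name

def greet(name):       # rebinds the name; the first greet is discarded
    return 'goodbye ' + name

print(greet('world'))  # goodbye world -- only the last definition survives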