commit stringlengths 40-40 | old_file stringlengths 4-118 | new_file stringlengths 4-118 | old_contents stringlengths 10-2.94k | new_contents stringlengths 21-3.18k | subject stringlengths 16-444 | message stringlengths 17-2.63k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5-43k | ndiff stringlengths 52-3.32k | instruction stringlengths 16-444 | content stringlengths 133-4.32k | fuzzy_diff stringlengths 16-3.18k
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0e807b46ba044e1accb8fb767f6f2ed4ffb2d0ba
|
dataportal/tests/test_broker.py
|
dataportal/tests/test_broker.py
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
Add coverage for basic broker usage.
|
TST: Add coverage for basic broker usage.
|
Python
|
bsd-3-clause
|
danielballan/dataportal,ericdill/datamuxer,danielballan/dataportal,NSLS-II/dataportal,NSLS-II/datamuxer,ericdill/datamuxer,tacaswell/dataportal,ericdill/databroker,danielballan/datamuxer,tacaswell/dataportal,danielballan/datamuxer,NSLS-II/dataportal,ericdill/databroker
|
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
+ from ..examples.sample_data import temperature_ramp
+ from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
- switch(channelarchiver=False, metadatastore=False, filestore=False)
+ switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
+ temperature_ramp.run()
+
+ def test_basic_usage(self):
+ header = db[-1]
+ events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
Add coverage for basic broker usage.
|
## Code Before:
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=False, filestore=False)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
## Instruction:
Add coverage for basic broker usage.
## Code After:
import unittest
from datetime import datetime
import numpy as np
import pandas as pd
from ..sources import channelarchiver as ca
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
class TestBroker(unittest.TestCase):
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
simulated_ca_data = generate_ca_data(['ch1', 'ch2'], start, end)
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
def tearDown(self):
switch(channelarchiver=True, metadatastore=True, filestore=True)
def generate_ca_data(channels, start_time, end_time):
timestamps = pd.date_range(start_time, end_time, freq='T').to_series()
timestamps = list(timestamps.dt.to_pydatetime()) # list of datetime objects
values = list(np.arange(len(timestamps)))
return {channel: (timestamps, values) for channel in channels}
|
...
from ..sources import switch
from ..examples.sample_data import temperature_ramp
from ..broker import DataBroker as db
...
def setUp(self):
switch(channelarchiver=False, metadatastore=True, filestore=True)
start, end = '2015-01-01 00:00:00', '2015-01-01 00:01:00'
...
ca.insert_data(simulated_ca_data)
temperature_ramp.run()
def test_basic_usage(self):
header = db[-1]
events = db.fetch_events(header)
...
|
05e61f1be4005edf2ff439ca2613bce8af217ff7
|
pubsubpull/models.py
|
pubsubpull/models.py
|
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from pubsubpull.fields import JSONB
class Request(models.Model):
"""A web request.
"""
user = models.ForeignKey(User, null=True, blank=True, related_name='requests')
method = models.CharField(max_length=20)
path = models.TextField()
OPERATION_TYPE = dict(I="INSERT", U="UPDATE", D="DELETE", T="TRUNCATE")
class UpdateLog(models.Model):
"""Store a change to a single row in a table.
"""
table = models.CharField(max_length=200)
type = models.CharField(max_length=1, choices=OPERATION_TYPE.items())
when = models.DateTimeField(auto_now_add=True)
request = models.ForeignKey(Request, null=True, blank=True,
related_name='changes')
old = JSONB(null=True, blank=True)
new = JSONB(null=True, blank=True)
def save(self, **kw):
raise ValidationError("Instances of this class cannot be using Django")
def __unicode__(self):
return u"%s %s @ %s" % (OPERATION_TYPE[self.type], self.table, self.when)
|
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from pubsubpull.fields import JSONB
class Request(models.Model):
"""A web request.
"""
user = models.ForeignKey(User, null=True, blank=True, related_name='requests')
method = models.CharField(max_length=20)
path = models.TextField()
def __unicode__(self):
return "%s %s" % (self.method, self.path)
OPERATION_TYPE = dict(I="INSERT", U="UPDATE", D="DELETE", T="TRUNCATE")
class UpdateLog(models.Model):
"""Store a change to a single row in a table.
"""
table = models.CharField(max_length=200)
type = models.CharField(max_length=1, choices=OPERATION_TYPE.items())
when = models.DateTimeField(auto_now_add=True)
request = models.ForeignKey(Request, null=True, blank=True,
related_name='changes')
old = JSONB(null=True, blank=True)
new = JSONB(null=True, blank=True)
def save(self, **kw):
raise ValidationError("Instances of this class cannot be using Django")
def __unicode__(self):
return u"%s %s @ %s" % (OPERATION_TYPE[self.type], self.table, self.when)
|
Add more useful display of the request data.
|
Add more useful display of the request data.
|
Python
|
mit
|
KayEss/django-pubsubpull,KayEss/django-pubsubpull,KayEss/django-pubsubpull
|
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from pubsubpull.fields import JSONB
class Request(models.Model):
"""A web request.
"""
user = models.ForeignKey(User, null=True, blank=True, related_name='requests')
method = models.CharField(max_length=20)
path = models.TextField()
+
+ def __unicode__(self):
+ return "%s %s" % (self.method, self.path)
OPERATION_TYPE = dict(I="INSERT", U="UPDATE", D="DELETE", T="TRUNCATE")
class UpdateLog(models.Model):
"""Store a change to a single row in a table.
"""
table = models.CharField(max_length=200)
type = models.CharField(max_length=1, choices=OPERATION_TYPE.items())
when = models.DateTimeField(auto_now_add=True)
request = models.ForeignKey(Request, null=True, blank=True,
related_name='changes')
old = JSONB(null=True, blank=True)
new = JSONB(null=True, blank=True)
def save(self, **kw):
raise ValidationError("Instances of this class cannot be using Django")
def __unicode__(self):
return u"%s %s @ %s" % (OPERATION_TYPE[self.type], self.table, self.when)
|
Add more useful display of the request data.
|
## Code Before:
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from pubsubpull.fields import JSONB
class Request(models.Model):
"""A web request.
"""
user = models.ForeignKey(User, null=True, blank=True, related_name='requests')
method = models.CharField(max_length=20)
path = models.TextField()
OPERATION_TYPE = dict(I="INSERT", U="UPDATE", D="DELETE", T="TRUNCATE")
class UpdateLog(models.Model):
"""Store a change to a single row in a table.
"""
table = models.CharField(max_length=200)
type = models.CharField(max_length=1, choices=OPERATION_TYPE.items())
when = models.DateTimeField(auto_now_add=True)
request = models.ForeignKey(Request, null=True, blank=True,
related_name='changes')
old = JSONB(null=True, blank=True)
new = JSONB(null=True, blank=True)
def save(self, **kw):
raise ValidationError("Instances of this class cannot be using Django")
def __unicode__(self):
return u"%s %s @ %s" % (OPERATION_TYPE[self.type], self.table, self.when)
## Instruction:
Add more useful display of the request data.
## Code After:
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db import models
from pubsubpull.fields import JSONB
class Request(models.Model):
"""A web request.
"""
user = models.ForeignKey(User, null=True, blank=True, related_name='requests')
method = models.CharField(max_length=20)
path = models.TextField()
def __unicode__(self):
return "%s %s" % (self.method, self.path)
OPERATION_TYPE = dict(I="INSERT", U="UPDATE", D="DELETE", T="TRUNCATE")
class UpdateLog(models.Model):
"""Store a change to a single row in a table.
"""
table = models.CharField(max_length=200)
type = models.CharField(max_length=1, choices=OPERATION_TYPE.items())
when = models.DateTimeField(auto_now_add=True)
request = models.ForeignKey(Request, null=True, blank=True,
related_name='changes')
old = JSONB(null=True, blank=True)
new = JSONB(null=True, blank=True)
def save(self, **kw):
raise ValidationError("Instances of this class cannot be using Django")
def __unicode__(self):
return u"%s %s @ %s" % (OPERATION_TYPE[self.type], self.table, self.when)
|
# ... existing code ...
path = models.TextField()
def __unicode__(self):
return "%s %s" % (self.method, self.path)
# ... rest of the code ...
|
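For context on the record above: Django uses a model's `__unicode__` (Python 2; `__str__` on Python 3) wherever it needs a human-readable label, such as the admin change list or a `ForeignKey` select widget, and falls back to an unhelpful default otherwise. A minimal plain-Python sketch of the effect, using a stand-in class rather than a real model:

```python
class Request(object):
    """Stand-in for the Django model above; no database required."""

    def __init__(self, method, path):
        self.method = method
        self.path = path

    def __unicode__(self):
        return u"%s %s" % (self.method, self.path)

    __str__ = __unicode__  # Python 3 looks up __str__ instead

print(Request("GET", "/accounts/login/"))
# GET /accounts/login/   (instead of <__main__.Request object at 0x...>)
```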
7ddaa5a5f9bee7e21c1221950c50c8688e815e01
|
wtforms/ext/sqlalchemy/__init__.py
|
wtforms/ext/sqlalchemy/__init__.py
|
import warnings
warnings.warn(
'wtforms.ext.sqlalchemy is deprecated, and will be removed in WTForms 3.0. '
'Instead transition to the excellent WTForms-Alchemy package: '
'https://github.com/kvesteri/wtforms-alchemy',
DeprecationWarning
)
|
import warnings
warnings.warn(
'wtforms.ext.sqlalchemy is deprecated, and will be removed in WTForms 3.0. '
'The package has been extracted to a separate package wtforms_sqlalchemy: '
'https://github.com/wtforms/wtforms-sqlalchemy .\n'
'Or alternately, check out the WTForms-Alchemy package which provides declarative mapping and more: '
'https://github.com/kvesteri/wtforms-alchemy',
DeprecationWarning
)
|
Add pointer to WTForms-SQLAlchemy in Deprecation
|
Add pointer to WTForms-SQLAlchemy in Deprecation
Closes #221
|
Python
|
bsd-3-clause
|
crast/wtforms,cklein/wtforms,wtforms/wtforms
|
import warnings
warnings.warn(
'wtforms.ext.sqlalchemy is deprecated, and will be removed in WTForms 3.0. '
- 'Instead transition to the excellent WTForms-Alchemy package: '
+ 'The package has been extracted to a separate package wtforms_sqlalchemy: '
+ 'https://github.com/wtforms/wtforms-sqlalchemy .\n'
+ 'Or alternately, check out the WTForms-Alchemy package which provides declarative mapping and more: '
'https://github.com/kvesteri/wtforms-alchemy',
DeprecationWarning
)
|
Add pointer to WTForms-SQLAlchemy in Deprecation
|
## Code Before:
import warnings
warnings.warn(
'wtforms.ext.sqlalchemy is deprecated, and will be removed in WTForms 3.0. '
'Instead transition to the excellent WTForms-Alchemy package: '
'https://github.com/kvesteri/wtforms-alchemy',
DeprecationWarning
)
## Instruction:
Add pointer to WTForms-SQLAlchemy in Deprecation
## Code After:
import warnings
warnings.warn(
'wtforms.ext.sqlalchemy is deprecated, and will be removed in WTForms 3.0. '
'The package has been extracted to a separate package wtforms_sqlalchemy: '
'https://github.com/wtforms/wtforms-sqlalchemy .\n'
'Or alternately, check out the WTForms-Alchemy package which provides declarative mapping and more: '
'https://github.com/kvesteri/wtforms-alchemy',
DeprecationWarning
)
|
// ... existing code ...
'wtforms.ext.sqlalchemy is deprecated, and will be removed in WTForms 3.0. '
'The package has been extracted to a separate package wtforms_sqlalchemy: '
'https://github.com/wtforms/wtforms-sqlalchemy .\n'
'Or alternately, check out the WTForms-Alchemy package which provides declarative mapping and more: '
'https://github.com/kvesteri/wtforms-alchemy',
// ... rest of the code ...
|
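One practical note on the deprecation shim above: `DeprecationWarning` is filtered out by default in most contexts (Python 3.7+ surfaces it only for code running directly under `__main__`), so downstream users may never see the new pointer unless they opt in. A minimal sketch:

```python
import warnings

# DeprecationWarning is ignored by default in most contexts;
# opt in to see messages like the one above:
warnings.simplefilter("always", DeprecationWarning)

warnings.warn(
    "this module is deprecated; see its replacement package",
    DeprecationWarning,
)
```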
84dee56df90d9181d1e79c3246ef389462f0ca17
|
configure_console_session.py
|
configure_console_session.py
|
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
|
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
|
Put imports as they are in init
|
Put imports as they are in init
|
Python
|
bsd-3-clause
|
nickpowersys/CaaR
|
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
- from comfort import cleanthermostat as ct
- from comfort import history as hi
- from comfort import histdaily as hd
- from comfort import histsummary as hs
+ from caar.cleanthermostat import dict_from_file
+ from caar.cleanthermostat import detect_columns
+ from caar.cleanthermostat import pickle_from_file
+
+ from caar.history import create_cycles_df
+ from caar.history import create_inside_df
+ from caar.history import create_outside_df
+ from caar.history import random_record
+
+ from caar.histsummary import days_of_data_by_id
+ from caar.histsummary import consecutive_days_of_observations
+ from caar.histsummary import daily_cycle_and_temp_obs_counts
+ from caar.histsummary import daily_data_points_by_id
+ from caar.histsummary import df_select_ids
+ from caar.histsummary import df_select_datetime_range
+ from caar.histsummary import count_of_data_points_for_each_id
+ from caar.histsummary import count_of_data_points_for_select_id
+ from caar.histsummary import location_id_of_thermo
+
+ from caar.timeseries import time_series_cycling_and_temps
+ from caar.timeseries import on_off_status
+ from caar.timeseries import temps_arr_by_freq
+ from caar.timeseries import plot_cycles_xy
+ from caar.timeseries import plot_temps_xy
+
|
Put imports as they are in init
|
## Code Before:
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
## Instruction:
Put imports as they are in init
## Code After:
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
|
// ... existing code ...
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
// ... modified code ...
// ... rest of the code ...
|
783af65a5e417a1828105d390f7096066929a4b7
|
ipywidgets/widgets/widget_core.py
|
ipywidgets/widgets/widget_core.py
|
"""Base widget class for widgets provided in Core"""
from .widget import Widget
from .._version import __jupyter_widget_version__
from traitlets import Unicode
class CoreWidget(Widget):
_model_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
_view_module_version = Unicode('*').tag(sync=True)
|
"""Base widget class for widgets provided in Core"""
from .widget import Widget
from .._version import __jupyter_widget_version__
from traitlets import Unicode
class CoreWidget(Widget):
_model_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
_view_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
|
Revert the versioning back until the versioning discussion is settled.
|
Revert the versioning back until the versioning discussion is settled.
|
Python
|
bsd-3-clause
|
ipython/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets
|
"""Base widget class for widgets provided in Core"""
from .widget import Widget
from .._version import __jupyter_widget_version__
from traitlets import Unicode
class CoreWidget(Widget):
_model_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
- _view_module_version = Unicode('*').tag(sync=True)
+ _view_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
|
Revert the versioning back until the versioning discussion is settled.
|
## Code Before:
"""Base widget class for widgets provided in Core"""
from .widget import Widget
from .._version import __jupyter_widget_version__
from traitlets import Unicode
class CoreWidget(Widget):
_model_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
_view_module_version = Unicode('*').tag(sync=True)
## Instruction:
Revert the versioning back until the versioning discussion is settled.
## Code After:
"""Base widget class for widgets provided in Core"""
from .widget import Widget
from .._version import __jupyter_widget_version__
from traitlets import Unicode
class CoreWidget(Widget):
_model_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
_view_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
|
...
_model_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
_view_module_version = Unicode(__jupyter_widget_version__).tag(sync=True)
...
|
a244623642cdf26bd6615cdc7ff2540c9361d10d
|
tmapi/models/typed.py
|
tmapi/models/typed.py
|
from django.db import models
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
self.type = construct_type
self.save()
|
from django.db import models
from tmapi.exceptions import ModelConstraintException
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
if construct_type is None:
raise ModelConstraintException
self.type = construct_type
self.save()
|
Raise an exception when setting a construct's type to None.
|
Raise an exception when setting a construct's type to None.
|
Python
|
apache-2.0
|
ajenhl/django-tmapi
|
from django.db import models
+
+ from tmapi.exceptions import ModelConstraintException
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
+ if construct_type is None:
+ raise ModelConstraintException
self.type = construct_type
self.save()
|
Raise an exception when setting a construct's type to None.
|
## Code Before:
from django.db import models
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
self.type = construct_type
self.save()
## Instruction:
Raise an exception when setting a construct's type to None.
## Code After:
from django.db import models
from tmapi.exceptions import ModelConstraintException
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
if construct_type is None:
raise ModelConstraintException
self.type = construct_type
self.save()
|
...
from django.db import models
from tmapi.exceptions import ModelConstraintException
...
"""
if construct_type is None:
raise ModelConstraintException
self.type = construct_type
...
|
f4a73fcc591d877003e9963f087d2473568bfa9d
|
python/ql/test/experimental/query-tests/Security/CWE-079/sendgrid_via_mail_send_post_request_body_bad.py
|
python/ql/test/experimental/query-tests/Security/CWE-079/sendgrid_via_mail_send_post_request_body_bad.py
|
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "[email protected]",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "[email protected]",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
|
import sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "[email protected]",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "[email protected]",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
|
Add RFS to `sendgrid` test
|
Add RFS to `sendgrid` test
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
import sendgrid
import os
+ from flask import request, Flask
+
+ app = Flask(__name__)
+ @app.route("/sendgrid")
+ def send():
- sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
+ sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
- data = {
+ data = {
- "content": [
+ "content": [
- {
+ {
- "type": "text/html",
+ "type": "text/html",
- "value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
+ "value": "<html>{}</html>".format(request.args["html_content"])
+ }
- }
+ ],
- ],
- "from": {
+ "from": {
- "email": "[email protected]",
+ "email": "[email protected]",
- "name": "Sam Smith"
+ "name": "Sam Smith"
- },
- "headers": {},
- "mail_settings": {
- "footer": {
- "enable": True,
- "html": "<p>Thanks</br>The SendGrid Team</p>",
- "text": "Thanks,/n The SendGrid Team"
},
+ "headers": {},
+ "mail_settings": {
+ "footer": {
+ "enable": True,
+ "html": "<html>{}</html>".format(request.args["html_footer"]),
+ "text": "Thanks,/n The SendGrid Team"
+ },
- },
+ },
- "reply_to": {
+ "reply_to": {
- "email": "[email protected]",
+ "email": "[email protected]",
- "name": "Sam Smith"
+ "name": "Sam Smith"
- },
+ },
- "send_at": 1409348513,
+ "send_at": 1409348513,
- "subject": "Hello, World!",
+ "subject": "Hello, World!",
- "template_id": "[YOUR TEMPLATE ID GOES HERE]",
+ "template_id": "[YOUR TEMPLATE ID GOES HERE]",
- "tracking_settings": {
+ "tracking_settings": {
- "subscription_tracking": {
+ "subscription_tracking": {
- "enable": True,
+ "enable": True,
- "html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
+ "html": "<html>{}</html>".format(request.args["html_tracking"]),
- "substitution_tag": "<%click here%>",
+ "substitution_tag": "<%click here%>",
- "text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
+ "text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
+ }
}
}
- }
- response = sg.client.mail.send.post(request_body=data)
+ response = sg.client.mail.send.post(request_body=data)
|
Add RFS to `sendgrid` test
|
## Code Before:
import sendgrid
import os
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html><p>Hello, world!</p><img src=[CID GOES HERE]></img></html>"
}
],
"from": {
"email": "[email protected]",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<p>Thanks</br>The SendGrid Team</p>",
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "[email protected]",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "If you would like to unsubscribe and stop receiving these emails <% clickhere %>.",
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
## Instruction:
Add RFS to `sendgrid` test
## Code After:
import sendgrid
import os
from flask import request, Flask
app = Flask(__name__)
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "[email protected]",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "[email protected]",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
}
response = sg.client.mail.send.post(request_body=data)
|
// ... existing code ...
import os
from flask import request, Flask
app = Flask(__name__)
// ... modified code ...
@app.route("/sendgrid")
def send():
sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
data = {
"content": [
{
"type": "text/html",
"value": "<html>{}</html>".format(request.args["html_content"])
}
],
"from": {
"email": "[email protected]",
"name": "Sam Smith"
},
"headers": {},
"mail_settings": {
"footer": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_footer"]),
"text": "Thanks,/n The SendGrid Team"
},
},
"reply_to": {
"email": "[email protected]",
"name": "Sam Smith"
},
"send_at": 1409348513,
"subject": "Hello, World!",
"template_id": "[YOUR TEMPLATE ID GOES HERE]",
"tracking_settings": {
"subscription_tracking": {
"enable": True,
"html": "<html>{}</html>".format(request.args["html_tracking"]),
"substitution_tag": "<%click here%>",
"text": "If you would like to unsubscribe and stop receiving these emails <% click here %>."
}
}
...
}
response = sg.client.mail.send.post(request_body=data)
// ... rest of the code ...
|
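The rewrite above is there to give the CWE-079 query a remote flow source to track (presumably the "RFS" in the subject): `request.args` is attacker-controlled, and formatting it unescaped into a `text/html` mail body is the XSS sink. The same source-to-sink shape in its simplest form, with hypothetical route and parameter names:

```python
from flask import Flask, request

app = Flask(__name__)

@app.route("/greet")
def greet():
    name = request.args.get("name", "")               # source: user input
    return "<html><p>Hi {}</p></html>".format(name)   # sink: unescaped HTML
```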
c2d1621e089b10418785e173145fb16b0759df1a
|
lib/jasy/core/Info.py
|
lib/jasy/core/Info.py
|
import os, sys
def root():
""" Returns the root path of Jasy """
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
def cldrData(what):
return os.path.join(root(), "data", "cldr", what)
def localeProject(locale):
return os.path.join(root(), "data", "jslocale", locale)
def coreProject():
return os.path.join(root(), "data", "jscore")
|
import os, sys
def root():
""" Returns the root path of Jasy """
return os.path.relpath(os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)))
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
def cldrData(what):
return os.path.join(root(), "data", "cldr", what)
def localeProject(locale):
return os.path.join(root(), "data", "jslocale", locale)
def coreProject():
return os.path.join(root(), "data", "jscore")
|
Reduce path to shortest possible from current dir.
|
Reduce path to shortest possible from current dir.
|
Python
|
mit
|
zynga/jasy,sebastian-software/jasy,zynga/jasy,sebastian-software/jasy
|
import os, sys
def root():
""" Returns the root path of Jasy """
+ return os.path.relpath(os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)))
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
def cldrData(what):
return os.path.join(root(), "data", "cldr", what)
def localeProject(locale):
return os.path.join(root(), "data", "jslocale", locale)
def coreProject():
return os.path.join(root(), "data", "jscore")
|
Reduce path to shortest possible from current dir.
|
## Code Before:
import os, sys
def root():
""" Returns the root path of Jasy """
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
def cldrData(what):
return os.path.join(root(), "data", "cldr", what)
def localeProject(locale):
return os.path.join(root(), "data", "jslocale", locale)
def coreProject():
return os.path.join(root(), "data", "jscore")
## Instruction:
Reduce path to shortest possible from current dir.
## Code After:
import os, sys
def root():
""" Returns the root path of Jasy """
return os.path.relpath(os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)))
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
def cldrData(what):
return os.path.join(root(), "data", "cldr", what)
def localeProject(locale):
return os.path.join(root(), "data", "jslocale", locale)
def coreProject():
return os.path.join(root(), "data", "jscore")
|
// ... existing code ...
""" Returns the root path of Jasy """
return os.path.relpath(os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)))
return os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir))
// ... rest of the code ...
|
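Note that the committed version above keeps the old `return` as unreachable dead code; only the `os.path.relpath` line runs. What that call does, with hypothetical paths:

```python
import os

# relpath is pure string computation; it never touches the filesystem:
print(os.path.relpath("/home/user/jasy", start="/home/user/jasy/project"))
# -> ".." (the shortest route to the root from the given directory)
```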
dd58dbbbdb9b3a9479fa5db38a4e4038a6514fef
|
configReader.py
|
configReader.py
|
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
pass
#If a new line is the first char
elif (item[0]=='\n'):
pass
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys
def getKeys(self):
return self.keys
|
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys
def getKeys(self):
return self.keys
|
Change 'pass' statements to 'continue' statements.
|
Change 'pass' statements to 'continue' statements.
|
Python
|
mit
|
ollien/PyConfigReader
|
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
- pass
+ continue
#If a new line is the first char
elif (item[0]=='\n'):
- pass
+ continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys
def getKeys(self):
return self.keys
+
|
Change 'pass' statements to 'continue' statements.
|
## Code Before:
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
pass
#If a new line is the first char
elif (item[0]=='\n'):
pass
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys
def getKeys(self):
return self.keys
## Instruction:
Change 'pass' statements to 'continue' statements.
## Code After:
class ConfigReader():
def __init__(self):
self.keys={}
#Read Keys from file
def readKeys(self):
keysFile=open("config.txt","r")
fileLines=keysFile.readlines()
keysFile.close()
self.keys.clear()
for item in fileLines:
#If last char is \n
if (item[-1]=='\n'):
item=item[:-1]
#If a commented line
if (item[0]=='#'):
continue
#If a new line is the first char
elif (item[0]=='\n'):
continue
else:
#Get Position of equal sign
pos=item.index('=')
#Name of the key is [0:pos], Value of the key is [pos+1:-1] (Stripping the \n char at the end)
self.keys[item[0:pos]]=item[pos+1:]
#Return the keys
def getKeys(self):
return self.keys
|
# ... existing code ...
if (item[0]=='#'):
continue
#If a new line is the first char
# ... modified code ...
elif (item[0]=='\n'):
continue
else:
# ... rest of the code ...
|
a44eecac4306504e7d3e6b8253deeb35e6b1fb43
|
numpy/typing/setup.py
|
numpy/typing/setup.py
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('typing', parent_package, top_path)
config.add_subpackage('tests')
config.add_data_dir('tests/data')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('typing', parent_package, top_path)
config.add_subpackage('tests')
config.add_data_dir('tests/data')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
Add `.pyi` data files to the `numpy.typing` sub-package
|
BLD: Add `.pyi` data files to the `numpy.typing` sub-package
|
Python
|
bsd-3-clause
|
mattip/numpy,jakirkham/numpy,seberg/numpy,rgommers/numpy,anntzer/numpy,numpy/numpy,numpy/numpy,charris/numpy,jakirkham/numpy,anntzer/numpy,charris/numpy,pdebuyl/numpy,rgommers/numpy,charris/numpy,mattip/numpy,rgommers/numpy,simongibbons/numpy,mattip/numpy,jakirkham/numpy,pdebuyl/numpy,endolith/numpy,anntzer/numpy,seberg/numpy,endolith/numpy,pdebuyl/numpy,simongibbons/numpy,seberg/numpy,pdebuyl/numpy,anntzer/numpy,simongibbons/numpy,numpy/numpy,simongibbons/numpy,endolith/numpy,mhvk/numpy,jakirkham/numpy,mattip/numpy,jakirkham/numpy,seberg/numpy,mhvk/numpy,simongibbons/numpy,rgommers/numpy,mhvk/numpy,endolith/numpy,numpy/numpy,mhvk/numpy,mhvk/numpy,charris/numpy
|
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('typing', parent_package, top_path)
config.add_subpackage('tests')
config.add_data_dir('tests/data')
+ config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
Add `.pyi` data files to the `numpy.typing` sub-package
|
## Code Before:
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('typing', parent_package, top_path)
config.add_subpackage('tests')
config.add_data_dir('tests/data')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
## Instruction:
Add `.pyi` data files to the `numpy.typing` sub-package
## Code After:
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('typing', parent_package, top_path)
config.add_subpackage('tests')
config.add_data_dir('tests/data')
config.add_data_files('*.pyi')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
// ... existing code ...
config.add_data_dir('tests/data')
config.add_data_files('*.pyi')
return config
// ... rest of the code ...
|
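The record above uses the now-deprecated `numpy.distutils` configuration API; the underlying requirement is the PEP 561 one, that `.pyi` stubs only help type checkers if they ship inside the installed package. A rough modern-setuptools equivalent, offered as an assumption rather than what NumPy itself does:

```python
# Hypothetical setup.py for a package "mypkg" that ships inline stubs:
from setuptools import setup

setup(
    name="mypkg",
    packages=["mypkg"],
    package_data={"mypkg": ["*.pyi", "py.typed"]},  # PEP 561 marker + stubs
)
```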
6f5be9af15898f089c3ee83ca1f05fbd4570fcfa
|
src/cms/apps/news/models.py
|
src/cms/apps/news/models.py
|
"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
|
"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
|
Set unique together on news article.
|
Set unique together on news article.
|
Python
|
bsd-3-clause
|
lewiscollard/cms,etianen/cms,etianen/cms,danielsamuels/cms,jamesfoley/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,dan-gamble/cms,jamesfoley/cms,danielsamuels/cms,etianen/cms,danielsamuels/cms
|
"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
+ unique_together = (("news_feed", "url_title",),)
-
-
Page.register_content(NewsFeed)
|
Set unique together on news article.
|
## Code Before:
"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
Page.register_content(NewsFeed)
## Instruction:
Set unique together on news article.
## Code After:
"""Models used by the news publication application."""
from django.db import models
from cms.apps.pages.models import Page, PageBase, PageField, HtmlField
from cms.apps.news.content import NewsFeed, NewsArticle
class Article(PageBase):
"""A news article."""
news_feed = PageField(Page,
"newsfeed")
content = HtmlField(blank=True,
null=True)
summary = models.TextField(blank=True,
null=True,
help_text="A short summary of this article. This will be used on news pages and RSS feeds. If not specified, then a summarized version of the content will be used.")
is_featured = models.BooleanField("featured",
default=False,
help_text="Featured articles will remain at the top of any news feeds.")
def get_absolute_url(self):
"""Returns the absolute URL of the article."""
return self.parent.content.reverse("article_detail", self.publication_date.year, self.publication_date.month, self.url_title, "")
class Meta:
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
Page.register_content(NewsFeed)
|
# ... existing code ...
verbose_name = "news article"
unique_together = (("news_feed", "url_title",),)
# ... rest of the code ...
|
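For reference, `unique_together` builds a composite database-level unique constraint, so two articles in the same feed cannot share a `url_title`. On Django 2.2+ the documentation steers toward the equivalent `UniqueConstraint`; a sketch against a simplified, hypothetical model:

```python
from django.db import models

class Article(models.Model):
    news_feed = models.ForeignKey("Page", on_delete=models.CASCADE)
    url_title = models.SlugField()

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["news_feed", "url_title"],
                name="unique_url_title_per_feed",
            )
        ]
```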
a0903bb9fd988662269e9f2ef7e38acd877a63d5
|
src/nodeconductor_saltstack/saltstack/handlers.py
|
src/nodeconductor_saltstack/saltstack/handlers.py
|
from __future__ import unicode_literals
import logging
from .log import event_logger
logger = logging.getLogger(__name__)
def log_saltstack_property_created(sender, instance, created=False, **kwargs):
if created:
event_logger.saltstack_property.info(
'%s {property_name} has been created.' % instance.get_type_display_name(),
event_type='saltstack_property_creation_succeeded',
event_context={
'property': instance,
})
else:
event_logger.saltstack_property.info(
'%s {property_name} has been updated.' % instance.get_type_display_name(),
event_type='saltstack_property_update_succeeded',
event_context={
'property': instance,
})
def log_saltstack_property_deleted(sender, instance, **kwargs):
event_logger.saltstack_property.info(
'%s {property_name} has been deleted.' % instance.get_type_display_name(),
event_type='saltstack_property_deletion_succeeded',
event_context={
'property': instance,
})
|
from __future__ import unicode_literals
import logging
from .log import event_logger
logger = logging.getLogger(__name__)
def log_saltstack_property_created(sender, instance, created=False, **kwargs):
if created:
event_logger.saltstack_property.info(
'%s {property_name} has been created in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_creation_succeeded',
event_context={
'property': instance,
})
else:
event_logger.saltstack_property.info(
'%s {property_name} has been updated in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_update_succeeded',
event_context={
'property': instance,
})
def log_saltstack_property_deleted(sender, instance, **kwargs):
event_logger.saltstack_property.info(
'%s {property_name} has been deleted from {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_deletion_succeeded',
event_context={
'property': instance,
})
|
Add more details to event logs for property CRUD
|
Add more details to event logs for property CRUD
|
Python
|
mit
|
opennode/nodeconductor-saltstack
|
from __future__ import unicode_literals
import logging
from .log import event_logger
logger = logging.getLogger(__name__)
def log_saltstack_property_created(sender, instance, created=False, **kwargs):
if created:
event_logger.saltstack_property.info(
- '%s {property_name} has been created.' % instance.get_type_display_name(),
+ '%s {property_name} has been created in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_creation_succeeded',
event_context={
'property': instance,
})
else:
event_logger.saltstack_property.info(
- '%s {property_name} has been updated.' % instance.get_type_display_name(),
+ '%s {property_name} has been updated in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_update_succeeded',
event_context={
'property': instance,
})
def log_saltstack_property_deleted(sender, instance, **kwargs):
event_logger.saltstack_property.info(
- '%s {property_name} has been deleted.' % instance.get_type_display_name(),
+ '%s {property_name} has been deleted from {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_deletion_succeeded',
event_context={
'property': instance,
})
|
Add more details to event logs for property CRUD
|
## Code Before:
from __future__ import unicode_literals
import logging
from .log import event_logger
logger = logging.getLogger(__name__)
def log_saltstack_property_created(sender, instance, created=False, **kwargs):
if created:
event_logger.saltstack_property.info(
'%s {property_name} has been created.' % instance.get_type_display_name(),
event_type='saltstack_property_creation_succeeded',
event_context={
'property': instance,
})
else:
event_logger.saltstack_property.info(
'%s {property_name} has been updated.' % instance.get_type_display_name(),
event_type='saltstack_property_update_succeeded',
event_context={
'property': instance,
})
def log_saltstack_property_deleted(sender, instance, **kwargs):
event_logger.saltstack_property.info(
'%s {property_name} has been deleted.' % instance.get_type_display_name(),
event_type='saltstack_property_deletion_succeeded',
event_context={
'property': instance,
})
## Instruction:
Add more details to event logs for property CRUD
## Code After:
from __future__ import unicode_literals
import logging
from .log import event_logger
logger = logging.getLogger(__name__)
def log_saltstack_property_created(sender, instance, created=False, **kwargs):
if created:
event_logger.saltstack_property.info(
'%s {property_name} has been created in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_creation_succeeded',
event_context={
'property': instance,
})
else:
event_logger.saltstack_property.info(
'%s {property_name} has been updated in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_update_succeeded',
event_context={
'property': instance,
})
def log_saltstack_property_deleted(sender, instance, **kwargs):
event_logger.saltstack_property.info(
'%s {property_name} has been deleted from {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_deletion_succeeded',
event_context={
'property': instance,
})
|
...
event_logger.saltstack_property.info(
'%s {property_name} has been created in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_creation_succeeded',
...
event_logger.saltstack_property.info(
'%s {property_name} has been updated in {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_update_succeeded',
...
event_logger.saltstack_property.info(
'%s {property_name} has been deleted from {tenant_full_name}.' % instance.get_type_display_name(),
event_type='saltstack_property_deletion_succeeded',
...
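A minimal standard-library sketch of the pattern this record captures; log_event, its signature, and the sample field values are illustrative stand-ins for the NodeConductor event logger, not its real API. The point is that the tenant name travels in the structured context and the message template merely renders it:
import logging
logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger("events")
def log_event(template, event_type, **context):
    # Render the human-readable message from the same structured context
    # that downstream consumers can filter on.
    logger.info(template.format(**context),
                extra={"event_type": event_type, "event_context": context})
log_event(
    "Domain {property_name} has been created in {tenant_full_name}.",
    event_type="saltstack_property_creation_succeeded",
    property_name="example.org",
    tenant_full_name="Acme SharePoint tenant",
)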
|
91c6c7b8e8077a185e8a62af0c3bcb74d4026e7c
|
tests/search.py
|
tests/search.py
|
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
self.assertIsInstance(search[0], pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
self.assertEqual(search[0].id, 18166)
|
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
for v in search:
self.assertIsInstance(v, pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
self.assertEqual(18166, [s.id for s in search])
|
Check every result in Search test
|
Check every result in Search test
|
Python
|
mit
|
authmillenon/pycomicvine
|
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
+ for v in search:
- self.assertIsInstance(search[0], pycomicvine.Volume)
+ self.assertIsInstance(v, pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
- self.assertEqual(search[0].id, 18166)
+ self.assertEqual(18166, [s.id for s in search])
|
Check every result in Search test
|
## Code Before:
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
self.assertIsInstance(search[0], pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
self.assertEqual(search[0].id, 18166)
## Instruction:
Check every result in Search test
## Code After:
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
for v in search:
self.assertIsInstance(v, pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
self.assertEqual(18166, [s.id for s in search])
|
# ... existing code ...
)
for v in search:
self.assertIsInstance(v, pycomicvine.Volume)
# ... modified code ...
self.assertNotEqual(len(search),0)
self.assertEqual(18166, [s.id for s in search])
# ... rest of the code ...
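A hedged sketch of the looping pattern, with a network-free stand-in for pycomicvine.Search. Two deliberate deviations, both assumptions of this sketch rather than the recorded commit: unittest.subTest (Python 3.4+) reports each failing result individually, and assertIn replaces the committed assertEqual, since comparing the bare id 18166 to a list of ids can never pass:
import unittest
class Volume:
    def __init__(self, id):
        self.id = id
def fake_search(query):
    # Stand-in for pycomicvine.Search; returns canned results offline.
    return [Volume(18166), Volume(42)]
class TestSearch(unittest.TestCase):
    def test_every_result_is_a_volume(self):
        for v in fake_search("The Walking Dead"):
            with self.subTest(result_id=v.id):
                self.assertIsInstance(v, Volume)
    def test_expected_id_is_present(self):
        ids = [v.id for v in fake_search("The Walking Dead")]
        self.assertIn(18166, ids)
if __name__ == "__main__":
    unittest.main()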
|
d3933d58b2ebcb0fb0c6301344335ae018973774
|
n_pair_mc_loss.py
|
n_pair_mc_loss.py
|
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) + batch_l2_norm_squared(f_p))
loss = loss_sce + l2_reg * l2_loss
return loss
|
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) +
batch_l2_norm_squared(f_p)) / (2.0 * N)
loss = loss_sce + l2_reg * l2_loss
return loss
|
Modify to average the L2 norm loss of output vectors
|
Modify to average the L2 norm loss of output vectors
|
Python
|
mit
|
ronekko/deep_metric_learning
|
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
- l2_loss = sum(batch_l2_norm_squared(f) + batch_l2_norm_squared(f_p))
+ l2_loss = sum(batch_l2_norm_squared(f) +
+ batch_l2_norm_squared(f_p)) / (2.0 * N)
loss = loss_sce + l2_reg * l2_loss
return loss
|
Modify to average the L2 norm loss of output vectors
|
## Code Before:
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) + batch_l2_norm_squared(f_p))
loss = loss_sce + l2_reg * l2_loss
return loss
## Instruction:
Modify to average the L2 norm loss of output vectors
## Code After:
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) +
batch_l2_norm_squared(f_p)) / (2.0 * N)
loss = loss_sce + l2_reg * l2_loss
return loss
|
# ... existing code ...
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) +
batch_l2_norm_squared(f_p)) / (2.0 * N)
loss = loss_sce + l2_reg * l2_loss
# ... rest of the code ...
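A NumPy-only illustration of the numerical effect, assuming nothing from Chainer: dividing the summed squared norms by 2N turns the regularizer into a per-example average, so l2_reg keeps the same effective strength regardless of batch size:
import numpy as np
def l2_term_summed(f, f_p):
    return np.sum(f ** 2) + np.sum(f_p ** 2)
def l2_term_averaged(f, f_p):
    N = len(f)
    return (np.sum(f ** 2) + np.sum(f_p ** 2)) / (2.0 * N)
rng = np.random.default_rng(0)
for N in (8, 128):
    f = rng.normal(size=(N, 16))
    f_p = rng.normal(size=(N, 16))
    # The summed term grows roughly linearly with N; the averaged one stays stable.
    print(N, l2_term_summed(f, f_p), l2_term_averaged(f, f_p))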
|
7c60024684024b604eb19a02d119adab547ed0d1
|
ovp_organizations/migrations/0023_auto_20170627_0236.py
|
ovp_organizations/migrations/0023_auto_20170627_0236.py
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ovp_organizations', '0022_auto_20170613_1424'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='address',
field=models.OneToOneField(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='ovp_core.SimpleAddress', verbose_name='address'),
),
]
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ovp_organizations', '0022_auto_20170613_1424'),
('ovp_core', '0011_simpleaddress'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='address',
field=models.OneToOneField(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='ovp_core.SimpleAddress', verbose_name='address'),
),
]
|
Add ovp_core_0011 migration as dependency for ovp_organizations_0023
|
Add ovp_core_0011 migration as dependency for ovp_organizations_0023
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-organizations,OpenVolunteeringPlatform/django-ovp-organizations
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ovp_organizations', '0022_auto_20170613_1424'),
+ ('ovp_core', '0011_simpleaddress'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='address',
field=models.OneToOneField(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='ovp_core.SimpleAddress', verbose_name='address'),
),
]
|
Add ovp_core_0011 migration as dependency for ovp_organizations_0023
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ovp_organizations', '0022_auto_20170613_1424'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='address',
field=models.OneToOneField(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='ovp_core.SimpleAddress', verbose_name='address'),
),
]
## Instruction:
Add ovp_core_0011 migration as dependency for ovp_organizations_0023
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('ovp_organizations', '0022_auto_20170613_1424'),
('ovp_core', '0011_simpleaddress'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='address',
field=models.OneToOneField(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to='ovp_core.SimpleAddress', verbose_name='address'),
),
]
|
...
('ovp_organizations', '0022_auto_20170613_1424'),
('ovp_core', '0011_simpleaddress'),
]
...
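A generic sketch of why the dependency matters; the app and migration names are placeholders, not the ovp ones. Django applies migrations in dependency order, so a migration whose field targets another app's model must name the migration that creates that model, or a fresh migrate run can fail with a missing-table or unresolved-reference error:
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    dependencies = [
        ("app_a", "0007_previous"),
        # Cross-app dependency: guarantees app_b.Address exists before
        # the field below is created.
        ("app_b", "0003_address"),
    ]
    operations = [
        migrations.AddField(
            model_name="organization",
            name="address",
            field=models.OneToOneField(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="app_b.Address",
            ),
        ),
    ]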
|
69ff671582bb343bd2ac9515964a3913e29f3d72
|
oabutton/wsgi.py
|
oabutton/wsgi.py
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
# Enable Django secure mode (see http://security.stackexchange.com/a/8970)
os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Enable Django secure mode in WSGI module
|
Enable Django secure mode in WSGI module
|
Python
|
mit
|
OAButton/OAButton_old,OAButton/OAButton_old,OAButton/OAButton_old
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
+
+ # Enable Django secure mode (see http://security.stackexchange.com/a/8970)
+ os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Enable Django secure mode in WSGI module
|
## Code Before:
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
## Instruction:
Enable Django secure mode in WSGI module
## Code After:
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
# Enable Django secure mode (see http://security.stackexchange.com/a/8970)
os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
...
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
# Enable Django secure mode (see http://security.stackexchange.com/a/8970)
os.environ["HTTPS"] = "on"
...
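A hedged note on the mechanism: os.environ["HTTPS"] = "on" only has an effect when the WSGI server copies the process environment into each request's environ, which not every server does. Current Django documents a proxy-header approach instead; a sketch of those settings, assuming a front-end proxy that terminates TLS and strips the header from untrusted client requests:
# settings.py (sketch; requires a trusted TLS-terminating proxy in front)
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SESSION_COOKIE_SECURE = True   # only send the session cookie over HTTPS
CSRF_COOKIE_SECURE = True      # same for the CSRF cookie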
|
6ef5a1a91e78c877a099b8c55df2f3f4d84686bb
|
bluesky/tests/test_vertical_integration.py
|
bluesky/tests/test_vertical_integration.py
|
from collections import defaultdict
from bluesky.examples import stepscan, det, motor
from bluesky.callbacks.broker import post_run, verify_files_saved
from functools import partial
def test_scan_and_get_data(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.mds.insert)
uid, = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
def test_post_run(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.mds.insert)
output = defaultdict(list)
def do_nothing(doctype, doc):
output[doctype].append(doc)
RE.ignore_callback_exceptions = False
RE(stepscan(det, motor), subs={'stop': [post_run(do_nothing, db=db)]})
assert len(output)
assert len(output['start']) == 1
assert len(output['stop']) == 1
assert len(output['descriptor']) == 1
assert len(output['event']) == 10
def test_verify_files_saved(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.mds.insert)
vfs = partial(verify_files_saved, db=db)
RE(stepscan(det, motor), subs={'stop': vfs})
|
from collections import defaultdict
from bluesky.examples import stepscan, det, motor
from bluesky.callbacks.broker import post_run, verify_files_saved
from functools import partial
def test_scan_and_get_data(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.insert)
uid, = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
list(hdr.events())
def test_post_run(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.insert)
output = defaultdict(list)
def do_nothing(doctype, doc):
output[doctype].append(doc)
RE(stepscan(det, motor), subs={'stop': [post_run(do_nothing, db=db)]})
assert len(output)
assert len(output['start']) == 1
assert len(output['stop']) == 1
assert len(output['descriptor']) == 1
assert len(output['event']) == 10
def test_verify_files_saved(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.insert)
vfs = partial(verify_files_saved, db=db)
RE(stepscan(det, motor), subs={'stop': vfs})
|
Update to new databroker API.
|
TST: Update to new databroker API.
|
Python
|
bsd-3-clause
|
ericdill/bluesky,ericdill/bluesky
|
from collections import defaultdict
from bluesky.examples import stepscan, det, motor
from bluesky.callbacks.broker import post_run, verify_files_saved
from functools import partial
def test_scan_and_get_data(fresh_RE, db):
RE = fresh_RE
- RE.subscribe(db.mds.insert)
+ RE.subscribe(db.insert)
uid, = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
- db.fetch_events(hdr)
+ list(hdr.events())
def test_post_run(fresh_RE, db):
RE = fresh_RE
- RE.subscribe(db.mds.insert)
+ RE.subscribe(db.insert)
output = defaultdict(list)
def do_nothing(doctype, doc):
output[doctype].append(doc)
-
- RE.ignore_callback_exceptions = False
RE(stepscan(det, motor), subs={'stop': [post_run(do_nothing, db=db)]})
assert len(output)
assert len(output['start']) == 1
assert len(output['stop']) == 1
assert len(output['descriptor']) == 1
assert len(output['event']) == 10
def test_verify_files_saved(fresh_RE, db):
RE = fresh_RE
- RE.subscribe(db.mds.insert)
+ RE.subscribe(db.insert)
vfs = partial(verify_files_saved, db=db)
RE(stepscan(det, motor), subs={'stop': vfs})
|
Update to new databroker API.
|
## Code Before:
from collections import defaultdict
from bluesky.examples import stepscan, det, motor
from bluesky.callbacks.broker import post_run, verify_files_saved
from functools import partial
def test_scan_and_get_data(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.mds.insert)
uid, = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
db.fetch_events(hdr)
def test_post_run(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.mds.insert)
output = defaultdict(list)
def do_nothing(doctype, doc):
output[doctype].append(doc)
RE.ignore_callback_exceptions = False
RE(stepscan(det, motor), subs={'stop': [post_run(do_nothing, db=db)]})
assert len(output)
assert len(output['start']) == 1
assert len(output['stop']) == 1
assert len(output['descriptor']) == 1
assert len(output['event']) == 10
def test_verify_files_saved(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.mds.insert)
vfs = partial(verify_files_saved, db=db)
RE(stepscan(det, motor), subs={'stop': vfs})
## Instruction:
Update to new databroker API.
## Code After:
from collections import defaultdict
from bluesky.examples import stepscan, det, motor
from bluesky.callbacks.broker import post_run, verify_files_saved
from functools import partial
def test_scan_and_get_data(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.insert)
uid, = RE(stepscan(det, motor), group='foo', beamline_id='testing',
config={})
hdr = db[uid]
list(hdr.events())
def test_post_run(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.insert)
output = defaultdict(list)
def do_nothing(doctype, doc):
output[doctype].append(doc)
RE(stepscan(det, motor), subs={'stop': [post_run(do_nothing, db=db)]})
assert len(output)
assert len(output['start']) == 1
assert len(output['stop']) == 1
assert len(output['descriptor']) == 1
assert len(output['event']) == 10
def test_verify_files_saved(fresh_RE, db):
RE = fresh_RE
RE.subscribe(db.insert)
vfs = partial(verify_files_saved, db=db)
RE(stepscan(det, motor), subs={'stop': vfs})
|
...
RE = fresh_RE
RE.subscribe(db.insert)
uid, = RE(stepscan(det, motor), group='foo', beamline_id='testing',
...
hdr = db[uid]
list(hdr.events())
...
RE = fresh_RE
RE.subscribe(db.insert)
output = defaultdict(list)
...
output[doctype].append(doc)
...
RE = fresh_RE
RE.subscribe(db.insert)
...
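A compact sketch of the access pattern the updated tests exercise. It assumes an already-configured databroker Broker `db` and RunEngine `RE` (environment-specific, so not constructed here), a databroker version whose headers expose .events(), and uses bluesky's stock count plan in place of the example stepscan:
from bluesky.plans import count
def run_and_fetch(RE, db, detectors):
    RE.subscribe(db.insert)        # stream documents straight into the broker
    uid, = RE(count(detectors))    # run the plan and keep the run's uid
    header = db[uid]               # look the run up by uid
    return list(header.events())   # events now come off the header itself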
|
2ad9cf280ee1743f1ad542d3c0c8d8365caea11e
|
condatestall.py
|
condatestall.py
|
from __future__ import print_function
import itertools
import subprocess
import os
import sys
NPY = '16', '17'
PY = '26', '27', '33'
RECIPE_DIR = "./buildscripts/condarecipe.local"
def main():
failfast = '-v' in sys.argv[1:]
args = "conda build %s --no-binstar-upload" % RECIPE_DIR
failures = []
for py, npy in itertools.product(PY, NPY):
if py == '33' and npy == '16':
# Skip python3 + numpy16
continue
os.environ['CONDA_PY'] = py
os.environ['CONDA_NPY'] = npy
try:
subprocess.check_call(args.split())
except subprocess.CalledProcessError as e:
failures.append((py, npy, e))
if failfast:
break
print("=" * 80)
if failures:
for py, npy, err in failures:
print("Test failed for python %s numpy %s" % (py, npy))
print(err)
else:
print("All Passed")
if __name__ == '__main__':
main()
|
from __future__ import print_function
import itertools
import subprocess
import os
import sys
if '-q' in sys.argv[1:]:
NPY = '18',
else:
NPY = '16', '17', '18'
PY = '26', '27', '33'
RECIPE_DIR = "./buildscripts/condarecipe.local"
def main():
failfast = '-v' in sys.argv[1:]
args = "conda build %s --no-binstar-upload" % RECIPE_DIR
failures = []
for py, npy in itertools.product(PY, NPY):
if py == '33' and npy == '16':
# Skip python3 + numpy16
continue
os.environ['CONDA_PY'] = py
os.environ['CONDA_NPY'] = npy
try:
subprocess.check_call(args.split())
except subprocess.CalledProcessError as e:
failures.append((py, npy, e))
if failfast:
break
print("=" * 80)
if failures:
for py, npy, err in failures:
print("Test failed for python %s numpy %s" % (py, npy))
print(err)
else:
print("All Passed")
if __name__ == '__main__':
main()
|
Add option for quick test on all Python versions
|
Add option for quick test on all Python versions
|
Python
|
bsd-2-clause
|
pitrou/numba,GaZ3ll3/numba,pombredanne/numba,GaZ3ll3/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,gmarkall/numba,cpcloud/numba,gdementen/numba,ssarangi/numba,seibert/numba,sklam/numba,gdementen/numba,jriehl/numba,gmarkall/numba,IntelLabs/numba,pombredanne/numba,stuartarchibald/numba,jriehl/numba,stuartarchibald/numba,gmarkall/numba,gmarkall/numba,gdementen/numba,stuartarchibald/numba,pombredanne/numba,pombredanne/numba,cpcloud/numba,numba/numba,ssarangi/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,IntelLabs/numba,pitrou/numba,jriehl/numba,numba/numba,gmarkall/numba,jriehl/numba,ssarangi/numba,gdementen/numba,stonebig/numba,sklam/numba,gdementen/numba,IntelLabs/numba,seibert/numba,seibert/numba,stefanseefeld/numba,stefanseefeld/numba,IntelLabs/numba,GaZ3ll3/numba,stefanseefeld/numba,pombredanne/numba,jriehl/numba,ssarangi/numba,sklam/numba,numba/numba,GaZ3ll3/numba,stefanseefeld/numba,sklam/numba,ssarangi/numba,stonebig/numba,stuartarchibald/numba,stefanseefeld/numba,stonebig/numba,stonebig/numba,GaZ3ll3/numba,pitrou/numba,cpcloud/numba,stonebig/numba,seibert/numba,sklam/numba,pitrou/numba,pitrou/numba
|
from __future__ import print_function
import itertools
import subprocess
import os
import sys
+ if '-q' in sys.argv[1:]:
+ NPY = '18',
+ else:
- NPY = '16', '17'
+ NPY = '16', '17', '18'
PY = '26', '27', '33'
RECIPE_DIR = "./buildscripts/condarecipe.local"
def main():
failfast = '-v' in sys.argv[1:]
args = "conda build %s --no-binstar-upload" % RECIPE_DIR
failures = []
for py, npy in itertools.product(PY, NPY):
if py == '33' and npy == '16':
# Skip python3 + numpy16
continue
os.environ['CONDA_PY'] = py
os.environ['CONDA_NPY'] = npy
try:
subprocess.check_call(args.split())
except subprocess.CalledProcessError as e:
failures.append((py, npy, e))
if failfast:
break
print("=" * 80)
if failures:
for py, npy, err in failures:
print("Test failed for python %s numpy %s" % (py, npy))
print(err)
else:
print("All Passed")
if __name__ == '__main__':
main()
|
Add option for quick test on all Python versions
|
## Code Before:
from __future__ import print_function
import itertools
import subprocess
import os
import sys
NPY = '16', '17'
PY = '26', '27', '33'
RECIPE_DIR = "./buildscripts/condarecipe.local"
def main():
failfast = '-v' in sys.argv[1:]
args = "conda build %s --no-binstar-upload" % RECIPE_DIR
failures = []
for py, npy in itertools.product(PY, NPY):
if py == '33' and npy == '16':
# Skip python3 + numpy16
continue
os.environ['CONDA_PY'] = py
os.environ['CONDA_NPY'] = npy
try:
subprocess.check_call(args.split())
except subprocess.CalledProcessError as e:
failures.append((py, npy, e))
if failfast:
break
print("=" * 80)
if failures:
for py, npy, err in failures:
print("Test failed for python %s numpy %s" % (py, npy))
print(err)
else:
print("All Passed")
if __name__ == '__main__':
main()
## Instruction:
Add option for quick test on all Python versions
## Code After:
from __future__ import print_function
import itertools
import subprocess
import os
import sys
if '-q' in sys.argv[1:]:
NPY = '18',
else:
NPY = '16', '17', '18'
PY = '26', '27', '33'
RECIPE_DIR = "./buildscripts/condarecipe.local"
def main():
failfast = '-v' in sys.argv[1:]
args = "conda build %s --no-binstar-upload" % RECIPE_DIR
failures = []
for py, npy in itertools.product(PY, NPY):
if py == '33' and npy == '16':
# Skip python3 + numpy16
continue
os.environ['CONDA_PY'] = py
os.environ['CONDA_NPY'] = npy
try:
subprocess.check_call(args.split())
except subprocess.CalledProcessError as e:
failures.append((py, npy, e))
if failfast:
break
print("=" * 80)
if failures:
for py, npy, err in failures:
print("Test failed for python %s numpy %s" % (py, npy))
print(err)
else:
print("All Passed")
if __name__ == '__main__':
main()
|
# ... existing code ...
if '-q' in sys.argv[1:]:
NPY = '18',
else:
NPY = '16', '17', '18'
PY = '26', '27', '33'
# ... rest of the code ...
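The same switch sketched with argparse instead of scanning sys.argv by hand; the flag names mirror the script's, but this refactor is only a suggestion, not part of the commit:
import argparse
import itertools
parser = argparse.ArgumentParser(description="conda build matrix")
parser.add_argument("-q", "--quick", action="store_true",
                    help="build only against the newest numpy")
parser.add_argument("-v", "--failfast", action="store_true",
                    help="stop at the first failing combination")
args = parser.parse_args()
NPY = ("18",) if args.quick else ("16", "17", "18")
PY = ("26", "27", "33")
for py, npy in itertools.product(PY, NPY):
    if py == "33" and npy == "16":
        continue  # python3 + numpy16 is skipped, as in the script
    print("would build for CONDA_PY=%s CONDA_NPY=%s" % (py, npy))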
|
94e6443a3eeb1bf76121ab2030a90c5631f32ff8
|
landscapesim/serializers/regions.py
|
landscapesim/serializers/regions.py
|
import json
from rest_framework import serializers
from django.core.urlresolvers import reverse
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
class Meta:
model = Region
fields = ('id', 'name', 'url')
def get_url(self, obj):
return reverse('region-reporting-units', args=[obj.id])
|
import json
from rest_framework import serializers
from django.core.urlresolvers import reverse
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
data = serializers.SerializerMethodField()
class Meta:
model = Region
fields = ('id', 'name', 'url', 'data',)
def get_url(self, obj):
return reverse('region-reporting-units', args=[obj.id])
def get_data(self, obj):
if self.context.get('request').GET.get('return_data') == 'true':
return ReportingUnitSerializer(obj.reporting_units.all(), many=True).data
return None
|
Allow data to be returned on the same request.
|
Allow data to be returned on the same request.
|
Python
|
bsd-3-clause
|
consbio/landscapesim,consbio/landscapesim,consbio/landscapesim
|
import json
from rest_framework import serializers
from django.core.urlresolvers import reverse
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
+ data = serializers.SerializerMethodField()
class Meta:
model = Region
- fields = ('id', 'name', 'url')
+ fields = ('id', 'name', 'url', 'data',)
def get_url(self, obj):
return reverse('region-reporting-units', args=[obj.id])
+ def get_data(self, obj):
+ if self.context.get('request').GET.get('return_data') == 'true':
+ return ReportingUnitSerializer(obj.reporting_units.all(), many=True).data
+ return None
+
|
Allow data to be returned on the same request.
|
## Code Before:
import json
from rest_framework import serializers
from django.core.urlresolvers import reverse
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
class Meta:
model = Region
fields = ('id', 'name', 'url')
def get_url(self, obj):
return reverse('region-reporting-units', args=[obj.id])
## Instruction:
Allow data to be returned on the same request.
## Code After:
import json
from rest_framework import serializers
from django.core.urlresolvers import reverse
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
data = serializers.SerializerMethodField()
class Meta:
model = Region
fields = ('id', 'name', 'url', 'data',)
def get_url(self, obj):
return reverse('region-reporting-units', args=[obj.id])
def get_data(self, obj):
if self.context.get('request').GET.get('return_data') == 'true':
return ReportingUnitSerializer(obj.reporting_units.all(), many=True).data
return None
|
...
url = serializers.SerializerMethodField()
data = serializers.SerializerMethodField()
...
model = Region
fields = ('id', 'name', 'url', 'data',)
...
return reverse('region-reporting-units', args=[obj.id])
def get_data(self, obj):
if self.context.get('request').GET.get('return_data') == 'true':
return ReportingUnitSerializer(obj.reporting_units.all(), many=True).data
return None
...
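A generic sketch of the opt-in pattern with illustrative names rather than the landscapesim models: the expensive nested payload is built only when the client passes ?return_data=true, and the request guard avoids an AttributeError when the serializer is used without a request in its context:
from rest_framework import serializers
class RegionListSerializer(serializers.Serializer):
    id = serializers.IntegerField()
    name = serializers.CharField()
    data = serializers.SerializerMethodField()
    def get_data(self, obj):
        request = self.context.get("request")
        if request and request.GET.get("return_data") == "true":
            # Only now touch the related queryset and build the heavy payload.
            return [{"id": u.id, "name": u.name}
                    for u in obj.reporting_units.all()]
        return None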
|
c97e5cf11fc21e2ef4ee04779a424e4d6a2b96ae
|
tools/perf/metrics/__init__.py
|
tools/perf/metrics/__init__.py
|
class Metric(object):
"""Base class for all the metrics that are used by telemetry measurements.
The Metric class represents a way of measuring something. Metrics are
helper classes used by PageMeasurements. Each PageMeasurement may use
multiple metrics; each metric should be focussed on collecting data
about one thing.
"""
def Start(self, page, tab):
"""Start collecting data for this metric."""
raise NotImplementedError()
def Stop(self, page, tab):
"""Stop collecting data for this metric (if applicable)."""
raise NotImplementedError()
def AddResults(self, tab, results):
"""Add the data collected into the results object for a measurement.
Metrics may implement AddResults to provide a common way to add results
to the PageMeasurementResults in PageMeasurement.AddMeasurement --
results should be added with results.Add(trace_name, unit, value).
"""
raise NotImplementedError()
|
class Metric(object):
"""Base class for all the metrics that are used by telemetry measurements.
The Metric class represents a way of measuring something. Metrics are
helper classes used by PageMeasurements. Each PageMeasurement may use
multiple metrics; each metric should be focussed on collecting data
about one thing.
"""
def CustomizeBrowserOptions(self, options):
"""Add browser options that are required by this metric.
Some metrics do not have any special browser options that need
to be added, and they do not need to override this method; by
default, no browser options are added.
To add options here, call options.AppendExtraBrowserArg(arg).
"""
pass
def Start(self, page, tab):
"""Start collecting data for this metric."""
raise NotImplementedError()
def Stop(self, page, tab):
"""Stop collecting data for this metric (if applicable)."""
raise NotImplementedError()
def AddResults(self, tab, results):
"""Add the data collected into the results object for a measurement.
Metrics may implement AddResults to provide a common way to add results
to the PageMeasurementResults in PageMeasurement.AddMeasurement --
results should be added with results.Add(trace_name, unit, value).
"""
raise NotImplementedError()
|
Add CustomizeBrowserOptions method to Metric base class
|
Add CustomizeBrowserOptions method to Metric base class
BUG=271177
Review URL: https://chromiumcodereview.appspot.com/22938004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217198 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
mogoweb/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,ChromiumWebApps/chromium,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,jaruba/chromium.src,littlstar/chromium.src,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,jaruba/chromium.src,ondra-novak/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,patrickm/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,jaruba/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,PeterWangIntel/chromium-cr
osswalk,ondra-novak/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,patrickm/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Jonekee/chromium.src,anirudhSK/chromium,markYoungH/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,ltilve/chromium,patrickm/chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,patrickm/chromium.src,M4sse/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,anirudhSK/chromium,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,jaruba/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,dednal/chromium.src,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,dednal/chromium.src,dushu1203/chromium.src,ltilve/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,littlstar/chromium.src,dednal/chromium.src,dednal/chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,ltilve/chromium,patrickm/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-
maksoud/chromium.src
|
class Metric(object):
"""Base class for all the metrics that are used by telemetry measurements.
The Metric class represents a way of measuring something. Metrics are
helper classes used by PageMeasurements. Each PageMeasurement may use
multiple metrics; each metric should be focussed on collecting data
about one thing.
"""
+
+ def CustomizeBrowserOptions(self, options):
+ """Add browser options that are required by this metric.
+
+ Some metrics do not have any special browser options that need
+ to be added, and they do not need to override this method; by
+ default, no browser options are added.
+
+ To add options here, call options.AppendExtraBrowserArg(arg).
+ """
+ pass
def Start(self, page, tab):
"""Start collecting data for this metric."""
raise NotImplementedError()
def Stop(self, page, tab):
"""Stop collecting data for this metric (if applicable)."""
raise NotImplementedError()
def AddResults(self, tab, results):
"""Add the data collected into the results object for a measurement.
Metrics may implement AddResults to provide a common way to add results
to the PageMeasurementResults in PageMeasurement.AddMeasurement --
results should be added with results.Add(trace_name, unit, value).
"""
raise NotImplementedError()
|
Add CustomizeBrowserOptions method to Metric base class
|
## Code Before:
class Metric(object):
"""Base class for all the metrics that are used by telemetry measurements.
The Metric class represents a way of measuring something. Metrics are
helper classes used by PageMeasurements. Each PageMeasurement may use
multiple metrics; each metric should be focussed on collecting data
about one thing.
"""
def Start(self, page, tab):
"""Start collecting data for this metric."""
raise NotImplementedError()
def Stop(self, page, tab):
"""Stop collecting data for this metric (if applicable)."""
raise NotImplementedError()
def AddResults(self, tab, results):
"""Add the data collected into the results object for a measurement.
Metrics may implement AddResults to provide a common way to add results
to the PageMeasurementResults in PageMeasurement.AddMeasurement --
results should be added with results.Add(trace_name, unit, value).
"""
raise NotImplementedError()
## Instruction:
Add CustomizeBrowserOptions method to Metric base class
## Code After:
class Metric(object):
"""Base class for all the metrics that are used by telemetry measurements.
The Metric class represents a way of measuring something. Metrics are
helper classes used by PageMeasurements. Each PageMeasurement may use
multiple metrics; each metric should be focussed on collecting data
about one thing.
"""
def CustomizeBrowserOptions(self, options):
"""Add browser options that are required by this metric.
Some metrics do not have any special browser options that need
to be added, and they do not need to override this method; by
default, no browser options are added.
To add options here, call options.AppendExtraBrowserArg(arg).
"""
pass
def Start(self, page, tab):
"""Start collecting data for this metric."""
raise NotImplementedError()
def Stop(self, page, tab):
"""Stop collecting data for this metric (if applicable)."""
raise NotImplementedError()
def AddResults(self, tab, results):
"""Add the data collected into the results object for a measurement.
Metrics may implement AddResults to provide a common way to add results
to the PageMeasurementResults in PageMeasurement.AddMeasurement --
results should be added with results.Add(trace_name, unit, value).
"""
raise NotImplementedError()
|
// ... existing code ...
"""
def CustomizeBrowserOptions(self, options):
"""Add browser options that are required by this metric.
Some metrics do not have any special browser options that need
to be added, and they do not need to override this method; by
default, no browser options are added.
To add options here, call options.AppendExtraBrowserArg(arg).
"""
pass
// ... rest of the code ...
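A sketch of how a concrete metric would use the new hook; it assumes the Metric base class from this record is in scope, and the browser flag and result values are illustrative only:
class TraceMetric(Metric):  # assumes the Metric base class defined above
    def CustomizeBrowserOptions(self, options):
        # Hypothetical flag; a real metric passes whatever switch it needs.
        options.AppendExtraBrowserArg("--enable-gpu-benchmarking")
    def Start(self, page, tab):
        self._samples = []
    def Stop(self, page, tab):
        self._samples.append(1.0)  # placeholder measurement
    def AddResults(self, tab, results):
        results.Add("trace_time", "ms", sum(self._samples))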
|
d55389580160c4585c131537c04c4045a38ea134
|
fluxghost/http_server_base.py
|
fluxghost/http_server_base.py
|
from select import select
import logging
import socket
logger = logging.getLogger("HTTPServer")
from fluxghost.http_handlers.websocket_handler import WebSocketHandler
from fluxghost.http_handlers.file_handler import FileHandler
class HttpServerBase(object):
def __init__(self, assets_path, address, backlog=10):
self.assets_handler = FileHandler(assets_path)
self.ws_handler = WebSocketHandler()
self.sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(address)
s.listen(backlog)
logger.info("Listen HTTP on %s:%s" % address)
def serve_forever(self):
self.running = True
args = ((self.sock, ), (), (), 30.)
while self.running:
try:
rl = select(*args)[0]
if rl:
self.on_accept()
except InterruptedError:
pass
|
from select import select
import logging
import socket
logger = logging.getLogger("HTTPServer")
from fluxghost.http_handlers.websocket_handler import WebSocketHandler
from fluxghost.http_handlers.file_handler import FileHandler
class HttpServerBase(object):
def __init__(self, assets_path, address, backlog=10):
self.assets_handler = FileHandler(assets_path)
self.ws_handler = WebSocketHandler()
self.sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(address)
s.listen(backlog)
if address[1] == 0:
from sys import stdout
address = s.getsockname()
stdout.write("LISTEN ON %i\n" % address[1])
stdout.flush()
logger.info("Listen HTTP on %s:%s" % address)
def serve_forever(self):
self.running = True
args = ((self.sock, ), (), (), 30.)
while self.running:
try:
rl = select(*args)[0]
if rl:
self.on_accept()
except InterruptedError:
pass
|
Add auto select port function
|
Add auto select port function
|
Python
|
agpl-3.0
|
flux3dp/fluxghost,flux3dp/fluxghost,flux3dp/fluxghost,flux3dp/fluxghost
|
from select import select
import logging
import socket
logger = logging.getLogger("HTTPServer")
from fluxghost.http_handlers.websocket_handler import WebSocketHandler
from fluxghost.http_handlers.file_handler import FileHandler
class HttpServerBase(object):
def __init__(self, assets_path, address, backlog=10):
self.assets_handler = FileHandler(assets_path)
self.ws_handler = WebSocketHandler()
self.sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(address)
s.listen(backlog)
+
+ if address[1] == 0:
+ from sys import stdout
+ address = s.getsockname()
+ stdout.write("LISTEN ON %i\n" % address[1])
+ stdout.flush()
+
logger.info("Listen HTTP on %s:%s" % address)
def serve_forever(self):
self.running = True
args = ((self.sock, ), (), (), 30.)
while self.running:
try:
rl = select(*args)[0]
if rl:
self.on_accept()
except InterruptedError:
pass
|
Add auto select port function
|
## Code Before:
from select import select
import logging
import socket
logger = logging.getLogger("HTTPServer")
from fluxghost.http_handlers.websocket_handler import WebSocketHandler
from fluxghost.http_handlers.file_handler import FileHandler
class HttpServerBase(object):
def __init__(self, assets_path, address, backlog=10):
self.assets_handler = FileHandler(assets_path)
self.ws_handler = WebSocketHandler()
self.sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(address)
s.listen(backlog)
logger.info("Listen HTTP on %s:%s" % address)
def serve_forever(self):
self.running = True
args = ((self.sock, ), (), (), 30.)
while self.running:
try:
rl = select(*args)[0]
if rl:
self.on_accept()
except InterruptedError:
pass
## Instruction:
Add auto select port function
## Code After:
from select import select
import logging
import socket
logger = logging.getLogger("HTTPServer")
from fluxghost.http_handlers.websocket_handler import WebSocketHandler
from fluxghost.http_handlers.file_handler import FileHandler
class HttpServerBase(object):
def __init__(self, assets_path, address, backlog=10):
self.assets_handler = FileHandler(assets_path)
self.ws_handler = WebSocketHandler()
self.sock = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(address)
s.listen(backlog)
if address[1] == 0:
from sys import stdout
address = s.getsockname()
stdout.write("LISTEN ON %i\n" % address[1])
stdout.flush()
logger.info("Listen HTTP on %s:%s" % address)
def serve_forever(self):
self.running = True
args = ((self.sock, ), (), (), 30.)
while self.running:
try:
rl = select(*args)[0]
if rl:
self.on_accept()
except InterruptedError:
pass
|
// ... existing code ...
s.listen(backlog)
if address[1] == 0:
from sys import stdout
address = s.getsockname()
stdout.write("LISTEN ON %i\n" % address[1])
stdout.flush()
logger.info("Listen HTTP on %s:%s" % address)
// ... rest of the code ...
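A standalone sketch of the mechanism: binding to port 0 asks the kernel for any free port, and getsockname() reports the one granted, which is what the new stdout line surfaces to a parent process:
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(("127.0.0.1", 0))       # port 0 = let the OS choose a free port
s.listen(1)
host, port = s.getsockname()   # the port actually assigned
print("LISTEN ON %i" % port)
s.close()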
|
16c567f27e1e4979321d319ddb334c263b43443f
|
gitcv/gitcv.py
|
gitcv/gitcv.py
|
import os
import yaml
from git import Repo
class GitCv:
def __init__(self, cv_path, repo_path):
self._cv = self._load_cv(cv_path)
self._repo_path = os.path.join(repo_path, 'cv')
def _load_cv(self, cv_path):
with open(cv_path, "r") as f:
cv = yaml.load(f)
return cv
def _create_repo(self):
self._repo = Repo.init(self._repo_path)
def _create_branches(self):
for stream in self._cv:
for entry in stream:
self._create_branch(entry)
def _create_branch(self, branch_name):
self._repo.create_head(branch_name)
def _create_file_and_commit(self, file_name):
open(os.path.join(self._repo_path, file_name), 'w').close()
self._repo.index.add([file_name])
self._repo.index.commit('Add {0}'.format(file_name))
def create(self):
self._create_repo()
self._create_file_and_commit('dummy.txt')
self._create_branches()
if __name__ == '__main__':
GitCv('../cv.yaml', '../target').create()
|
import os
import yaml
from git import Repo
class GitCv:
def __init__(self, cv_path, repo_path):
self._repo_path = os.path.join(repo_path, 'cv')
self._cv_path = cv_path
self._load_cv()
def _load_cv(self):
with open(self._cv_path, "r") as f:
self._cv = yaml.load(f)
def _create_repo(self):
self._repo = Repo.init(self._repo_path)
def _create_branches(self):
for stream in self._cv:
for entry in stream:
self._create_branch(entry)
def _create_branch(self, branch_name):
self._repo.create_head(branch_name)
def _create_file_and_commit(self, file_name):
open(os.path.join(self._repo_path, file_name), 'w').close()
self._repo.index.add([file_name])
self._repo.index.commit('Add {0}'.format(file_name))
def create(self):
self._create_repo()
self._create_file_and_commit('dummy.txt')
self._create_branches()
if __name__ == '__main__':
GitCv('../cv.yaml', '../target').create()
|
Make cv path class attribute
|
Make cv path class attribute
|
Python
|
mit
|
jangroth/git-cv,jangroth/git-cv
|
import os
import yaml
from git import Repo
class GitCv:
def __init__(self, cv_path, repo_path):
- self._cv = self._load_cv(cv_path)
self._repo_path = os.path.join(repo_path, 'cv')
+ self._cv_path = cv_path
+ self._load_cv()
- def _load_cv(self, cv_path):
+ def _load_cv(self):
- with open(cv_path, "r") as f:
+ with open(self._cv_path, "r") as f:
- cv = yaml.load(f)
+ self._cv = yaml.load(f)
- return cv
def _create_repo(self):
self._repo = Repo.init(self._repo_path)
def _create_branches(self):
for stream in self._cv:
for entry in stream:
self._create_branch(entry)
def _create_branch(self, branch_name):
self._repo.create_head(branch_name)
def _create_file_and_commit(self, file_name):
open(os.path.join(self._repo_path, file_name), 'w').close()
self._repo.index.add([file_name])
self._repo.index.commit('Add {0}'.format(file_name))
def create(self):
self._create_repo()
self._create_file_and_commit('dummy.txt')
self._create_branches()
if __name__ == '__main__':
GitCv('../cv.yaml', '../target').create()
|
Make cv path class attribute
|
## Code Before:
import os
import yaml
from git import Repo
class GitCv:
def __init__(self, cv_path, repo_path):
self._cv = self._load_cv(cv_path)
self._repo_path = os.path.join(repo_path, 'cv')
def _load_cv(self, cv_path):
with open(cv_path, "r") as f:
cv = yaml.load(f)
return cv
def _create_repo(self):
self._repo = Repo.init(self._repo_path)
def _create_branches(self):
for stream in self._cv:
for entry in stream:
self._create_branch(entry)
def _create_branch(self, branch_name):
self._repo.create_head(branch_name)
def _create_file_and_commit(self, file_name):
open(os.path.join(self._repo_path, file_name), 'w').close()
self._repo.index.add([file_name])
self._repo.index.commit('Add {0}'.format(file_name))
def create(self):
self._create_repo()
self._create_file_and_commit('dummy.txt')
self._create_branches()
if __name__ == '__main__':
GitCv('../cv.yaml', '../target').create()
## Instruction:
Make cv path class attribute
## Code After:
import os
import yaml
from git import Repo
class GitCv:
def __init__(self, cv_path, repo_path):
self._repo_path = os.path.join(repo_path, 'cv')
self._cv_path = cv_path
self._load_cv()
def _load_cv(self):
with open(self._cv_path, "r") as f:
self._cv = yaml.load(f)
def _create_repo(self):
self._repo = Repo.init(self._repo_path)
def _create_branches(self):
for stream in self._cv:
for entry in stream:
self._create_branch(entry)
def _create_branch(self, branch_name):
self._repo.create_head(branch_name)
def _create_file_and_commit(self, file_name):
open(os.path.join(self._repo_path, file_name), 'w').close()
self._repo.index.add([file_name])
self._repo.index.commit('Add {0}'.format(file_name))
def create(self):
self._create_repo()
self._create_file_and_commit('dummy.txt')
self._create_branches()
if __name__ == '__main__':
GitCv('../cv.yaml', '../target').create()
|
// ... existing code ...
def __init__(self, cv_path, repo_path):
self._repo_path = os.path.join(repo_path, 'cv')
self._cv_path = cv_path
self._load_cv()
def _load_cv(self):
with open(self._cv_path, "r") as f:
self._cv = yaml.load(f)
// ... rest of the code ...
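A minimal sketch of the refactor's shape, independent of git-cv: the path becomes instance state set in __init__, so the loader takes no arguments and can be called again to reload. yaml.safe_load is this sketch's substitution for the record's yaml.load, avoiding execution of arbitrary YAML tags:
import yaml
class ConfigHolder:
    def __init__(self, path):
        self._path = path   # keep the constructor argument as state
        self._load()
    def _load(self):
        # Re-runnable: rereads whatever self._path points at.
        with open(self._path) as f:
            self._data = yaml.safe_load(f)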
|
6f5e987b5a102b0c4b0bfcd88c17faab00655142
|
ctypeslib/test/test_toolchain.py
|
ctypeslib/test/test_toolchain.py
|
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
if __name__ == "__main__":
import unittest
unittest.main()
|
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
def test_windows(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "stdio.h",
"-o", "_stdio_gen.xml"])
xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"])
import _stdio_gen
if __name__ == "__main__":
import unittest
unittest.main()
|
Add a test for stdio.h.
|
Add a test for stdio.h.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@60472 6015fed2-1504-0410-9fe1-9d1591cc4771
|
Python
|
mit
|
trolldbois/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib,luzfcb/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib
|
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
- def test(self):
+ def test_windows(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
+ def test(self):
+ h2xml.main(["h2xml", "-q",
+ "-D WIN32_LEAN_AND_MEAN",
+ "-D _UNICODE", "-D UNICODE",
+ "-c", "stdio.h",
+ "-o", "_stdio_gen.xml"])
+ xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"])
+ import _stdio_gen
+
+
if __name__ == "__main__":
import unittest
unittest.main()
|
Add a test for stdio.h.
|
## Code Before:
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
if __name__ == "__main__":
import unittest
unittest.main()
## Instruction:
Add a test for stdio.h.
## Code After:
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
def test_windows(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "stdio.h",
"-o", "_stdio_gen.xml"])
xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"])
import _stdio_gen
if __name__ == "__main__":
import unittest
unittest.main()
|
// ... existing code ...
if sys.platform == "win32":
def test_windows(self):
h2xml.main(["h2xml", "-q",
// ... modified code ...
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "stdio.h",
"-o", "_stdio_gen.xml"])
xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"])
import _stdio_gen
if __name__ == "__main__":
// ... rest of the code ...
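The two tests above exercise the same two-step toolchain. A minimal sketch of it on a header of your own (mylib.h and the output names are placeholders, not part of the repo):
from ctypeslib import h2xml, xml2py
# Step 1: parse the C header into an XML description of its types and functions.
h2xml.main(["h2xml", "-q", "-c", "mylib.h", "-o", "_mylib_gen.xml"])
# Step 2: turn that XML description into a ctypes-based Python module.
xml2py.main(["xml2py", "_mylib_gen.xml", "-o", "_mylib_gen.py"])
import _mylib_gen  # the generated bindings import like any other module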
|
3544f211913ba67f0bd7e433c23d2e5b22bba719
|
lightcurve_pipeline/database/reset_database.py
|
lightcurve_pipeline/database/reset_database.py
|
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print 'Resetting database'
base.metadata.drop_all()
base.metadata.create_all()
|
from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print('Resetting database')
base.metadata.drop_all()
base.metadata.create_all()
|
Change the print statement to use __future__.
|
Change the print statement to use __future__.
|
Python
|
bsd-3-clause
|
justincely/lightcurve_pipeline
|
+
+ from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
- print 'Resetting database'
+ print('Resetting database')
base.metadata.drop_all()
base.metadata.create_all()
|
Change the print statement to use __future__.
|
## Code Before:
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print 'Resetting database'
base.metadata.drop_all()
base.metadata.create_all()
## Instruction:
Change the print statement to use __future__.
## Code After:
from __future__ import print_function
from lightcurve_pipeline.database.database_interface import base
from lightcurve_pipeline.utils.utils import SETTINGS
if __name__ == '__main__':
prompt = 'About to reset database instance {}. '.format(SETTINGS['db_connection_string'])
prompt += 'Do you wish to proceed? (y/n)'
response = raw_input(prompt)
if response.lower() == 'y':
print('Resetting database')
base.metadata.drop_all()
base.metadata.create_all()
|
...
from __future__ import print_function
...
if response.lower() == 'y':
print('Resetting database')
base.metadata.drop_all()
...
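What the __future__ import changes on Python 2, in isolation (a minimal sketch, not project code):
from __future__ import print_function
import sys
print('Resetting database')              # print is now a function, as on Python 3
print('error details', file=sys.stderr)  # keyword arguments such as file become available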
|
da2e34ca3371f0898df8b3181ba98132bd9a26e4
|
txircd/modbase.py
|
txircd/modbase.py
|
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
|
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
|
Add a function for commands to process parameters
|
Add a function for commands to process parameters
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,DesertBus/txircd,Heufneutje/txircd
|
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
+ def processParams(self, user, params):
+ return {
+ "user": user,
+ "params": params
+ }
|
Add a function for commands to process parameters
|
## Code Before:
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
## Instruction:
Add a function for commands to process parameters
## Code After:
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
|
// ... existing code ...
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
// ... rest of the code ...
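A hedged sketch of how a concrete command might use the new hook (NickCommand and its clamping rule are hypothetical, and this assumes Command is usable as the base class its body implies):
class NickCommand(Command):
    def processParams(self, user, params):
        # Hypothetical validation: reshape the arguments before onUse consumes them.
        if not params:
            return {"user": user, "params": []}
        return {"user": user, "params": [params[0][:32]]}  # clamp nick length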
|
47d9a8df136e235f49921d4782c5e392b0101107
|
migrations/versions/147_add_cleaned_subject.py
|
migrations/versions/147_add_cleaned_subject.py
|
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread',
['namespace_id', '_cleaned_subject'], unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
|
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'],
unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
|
Make _cleaned_subject migration match declared schema.
|
Make _cleaned_subject migration match declared schema.
Test Plan: Upgrade old database to head.
Reviewers: kav-ya
Reviewed By: kav-ya
Differential Revision: https://review.inboxapp.com/D1394
|
Python
|
agpl-3.0
|
Eagles2F/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,closeio/nylas,nylas/sync-engine,gale320/sync-engine,gale320/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,closeio/nylas,jobscore/sync-engine,gale320/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,nylas/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine
|
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
- op.create_index('ix_cleaned_subject', 'thread',
+ op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'],
- ['namespace_id', '_cleaned_subject'], unique=False)
+ unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
|
Make _cleaned_subject migration match declared schema.
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread',
['namespace_id', '_cleaned_subject'], unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
## Instruction:
Make _cleaned_subject migration match declared schema.
## Code After:
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'],
unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
|
// ... existing code ...
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'],
unique=False)
// ... rest of the code ...
|
1ef76b4f4395c9b5e3c2338822947999d5581013
|
labs/lab-3/ex-3-2.events.py
|
labs/lab-3/ex-3-2.events.py
|
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
Add type field to source
|
Add type field to source
|
Python
|
apache-2.0
|
jdgwartney/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab
|
import tspapi
api = tspapi.API()
- source = tspapi.Source(ref='myhost')
+ source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
Add type field to source
|
## Code Before:
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
## Instruction:
Add type field to source
## Code After:
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
# ... existing code ...
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
# ... rest of the code ...
|
e466da4f26d8cbac45476e8c00e009e004cd4baa
|
fluent_blogs/templatetags/fluent_blogs_comments_tags.py
|
fluent_blogs/templatetags/fluent_blogs_comments_tags.py
|
# Expose the tag library in the site.
# If `django.contrib.comments` is not used, this library can provide stubs instead.
# Currently, the real tags are exposed as the template already checks for `object.comments_are_open`.
# When a custom template is used, authors likely choose the desired commenting library instead.
from django.contrib.comments.templatetags.comments import register
|
from django.template import Library
from fluent_utils.django_compat import is_installed
# Expose the tag library in the site.
# If `django.contrib.comments` is not used, this library can provide stubs instead.
# Currently, the real tags are exposed as the template already checks for `object.comments_are_open`.
# When a custom template is used, authors likely choose the desired commenting library instead.
if is_installed('django.contrib.comments'):
from django.contrib.comments.templatetags.comments import register
elif is_installed('django_comments'):
from django_comments.templatetags.comments import register
else:
register = Library()
|
Support django-contrib-comments instead of django.contrib.comments for Django 1.8
|
Support django-contrib-comments instead of django.contrib.comments for Django 1.8
|
Python
|
apache-2.0
|
edoburu/django-fluent-blogs,edoburu/django-fluent-blogs
|
+ from django.template import Library
+ from fluent_utils.django_compat import is_installed
# Expose the tag library in the site.
# If `django.contrib.comments` is not used, this library can provide stubs instead.
# Currently, the real tags are exposed as the template already checks for `object.comments_are_open`.
# When a custom template is used, authors likely choose the desired commenting library instead.
+ if is_installed('django.contrib.comments'):
- from django.contrib.comments.templatetags.comments import register
+ from django.contrib.comments.templatetags.comments import register
+ elif is_installed('django_comments'):
+ from django_comments.templatetags.comments import register
+ else:
+ register = Library()
|
Support django-contrib-comments instead of django.contrib.comments for Django 1.8
|
## Code Before:
# Expose the tag library in the site.
# If `django.contrib.comments` is not used, this library can provide stubs instead.
# Currently, the real tags are exposed as the template already checks for `object.comments_are_open`.
# When a custom template is used, authors likely choose the desired commenting library instead.
from django.contrib.comments.templatetags.comments import register
## Instruction:
Support django-contrib-comments instead of django.contrib.comments for Django 1.8
## Code After:
from django.template import Library
from fluent_utils.django_compat import is_installed
# Expose the tag library in the site.
# If `django.contrib.comments` is not used, this library can provide stubs instead.
# Currently, the real tags are exposed as the template already checks for `object.comments_are_open`.
# When a custom template is used, authors likely choose the desired commenting library instead.
if is_installed('django.contrib.comments'):
from django.contrib.comments.templatetags.comments import register
elif is_installed('django_comments'):
from django_comments.templatetags.comments import register
else:
register = Library()
|
// ... existing code ...
from django.template import Library
from fluent_utils.django_compat import is_installed
// ... modified code ...
if is_installed('django.contrib.comments'):
from django.contrib.comments.templatetags.comments import register
elif is_installed('django_comments'):
from django_comments.templatetags.comments import register
else:
register = Library()
// ... rest of the code ...
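The same guarantee expressed with a plain try/except instead of the repo's is_installed helper (a sketch, not the project's code): whichever branch wins, a Library instance named register exists, so a {% load %} of this tag library never fails.
from django.template import Library
try:
    from django_comments.templatetags.comments import register
except ImportError:
    register = Library()  # stub: the tag library is simply empty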
|
737e2877cfad9ea801641b72094633a7c0178a44
|
UM/Settings/__init__.py
|
UM/Settings/__init__.py
|
from .SettingDefinition import SettingDefinition
from .SettingInstance import SettingInstance
from .DefinitionContainer import DefinitionContainer
from .InstanceContainer import InstanceContainer
from .ContainerStack import ContainerStack
from .Validator import Validator
|
from .ContainerRegistry import ContainerRegistry
from .SettingDefinition import SettingDefinition
from .SettingInstance import SettingInstance
from .DefinitionContainer import DefinitionContainer
from .InstanceContainer import InstanceContainer
from .ContainerStack import ContainerStack
from .Validator import Validator
|
Add ContainerRegistry to the exposed classes of UM.Settings
|
Add ContainerRegistry to the exposed classes of UM.Settings
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
+
+ from .ContainerRegistry import ContainerRegistry
from .SettingDefinition import SettingDefinition
-
from .SettingInstance import SettingInstance
from .DefinitionContainer import DefinitionContainer
from .InstanceContainer import InstanceContainer
from .ContainerStack import ContainerStack
from .Validator import Validator
|
Add ContainerRegistry to the exposed classes of UM.Settings
|
## Code Before:
from .SettingDefinition import SettingDefinition
from .SettingInstance import SettingInstance
from .DefinitionContainer import DefinitionContainer
from .InstanceContainer import InstanceContainer
from .ContainerStack import ContainerStack
from .Validator import Validator
## Instruction:
Add ContainerRegistry to the exposed classes of UM.Settings
## Code After:
from .ContainerRegistry import ContainerRegistry
from .SettingDefinition import SettingDefinition
from .SettingInstance import SettingInstance
from .DefinitionContainer import DefinitionContainer
from .InstanceContainer import InstanceContainer
from .ContainerStack import ContainerStack
from .Validator import Validator
|
...
from .ContainerRegistry import ContainerRegistry
...
from .SettingDefinition import SettingDefinition
from .SettingInstance import SettingInstance
...
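What the added line enables for callers, in one illustrative line:
from UM.Settings import ContainerRegistry  # instead of UM.Settings.ContainerRegistry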
|
49e95022577eb40bcf9e1d1c9f95be7269fd0e3b
|
scripts/update_acq_stats.py
|
scripts/update_acq_stats.py
|
from mica.stats import update_acq_stats
update_acq_stats.main()
import os
table_file = mica.stats.acq_stats.table_file
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
Fix reference to acq table file in script
|
Fix reference to acq table file in script
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
-
+ import os
from mica.stats import update_acq_stats
+ import mica.stats.acq_stats
update_acq_stats.main()
- import os
+
- table_file = mica.stats.acq_stats.table_file
+ table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
Fix reference to acq table file in script
|
## Code Before:
from mica.stats import update_acq_stats
update_acq_stats.main()
import os
table_file = mica.stats.acq_stats.table_file
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
## Instruction:
Fix reference to acq table file in script
## Code After:
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 50e6:
print("""
Warning: {tfile} is larger than 50MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
// ... existing code ...
import os
from mica.stats import update_acq_stats
import mica.stats.acq_stats
update_acq_stats.main()
// ... modified code ...
table_file = mica.stats.acq_stats.TABLE_FILE
file_stat = os.stat(table_file)
// ... rest of the code ...
|
4c124f151c2f8d466840b10e7ed53395b3d587dc
|
UM/Math/Ray.py
|
UM/Math/Ray.py
|
from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
|
from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
|
Add a convenience method to get a point along a ray
|
Add a convenience method to get a point along a ray
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
+ def getPointAlongRay(self, distance):
+ return self._origin + (self._direction * distance)
+
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
|
Add a convenience method to get a point along a ray
|
## Code Before:
from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
## Instruction:
Add a convenience method to get a point along a ray
## Code After:
from UM.Math.Vector import Vector
class Ray:
def __init__(self, origin = Vector(), direction = Vector()):
self._origin = origin
self._direction = direction
self._invDirection = 1.0 / direction
@property
def origin(self):
return self._origin
@property
def direction(self):
return self._direction
@property
def inverseDirection(self):
return self._invDirection
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
return "Ray(origin = {0}, direction = {1})".format(self._origin, self._direction)
|
# ... existing code ...
def getPointAlongRay(self, distance):
return self._origin + (self._direction * distance)
def __repr__(self):
# ... rest of the code ...
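A quick sanity check of the parametric form the helper implements, point = origin + distance * direction (a sketch; it assumes Vector accepts x, y, z components and supports the arithmetic the class already relies on):
from UM.Math.Vector import Vector
from UM.Math.Ray import Ray
ray = Ray(origin = Vector(0, 0, 0), direction = Vector(1, 1, 1))
point = ray.getPointAlongRay(5)  # expected: (5, 5, 5), i.e. origin plus 5 * direction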
|
592ffbcd7fbbc29bfd377b5abadb39aa29f1c88d
|
foyer/tests/conftest.py
|
foyer/tests/conftest.py
|
import pytest
@pytest.fixture(scope="session")
def initdir(tmpdir):
tmpdir.chdir()
|
import pytest
@pytest.fixture(autouse=True)
def initdir(tmpdir):
tmpdir.chdir()
|
Switch from scope="session" to autouse=True
|
Switch from scope="session" to autouse=True
|
Python
|
mit
|
iModels/foyer,mosdef-hub/foyer,mosdef-hub/foyer,iModels/foyer
|
import pytest
- @pytest.fixture(scope="session")
+ @pytest.fixture(autouse=True)
def initdir(tmpdir):
tmpdir.chdir()
|
Switch from scope="session" to autouse=True
|
## Code Before:
import pytest
@pytest.fixture(scope="session")
def initdir(tmpdir):
tmpdir.chdir()
## Instruction:
Switch from scope="session" to autouse=True
## Code After:
import pytest
@pytest.fixture(autouse=True)
def initdir(tmpdir):
tmpdir.chdir()
|
// ... existing code ...
@pytest.fixture(autouse=True)
def initdir(tmpdir):
// ... rest of the code ...
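Why the switch matters, sketched as a hypothetical test module: a session-scoped fixture cannot depend on the function-scoped tmpdir (pytest raises a ScopeMismatch error), and it would only run when requested by name; autouse=True runs it before every test automatically.
import pytest
@pytest.fixture(autouse=True)
def initdir(tmpdir):
    tmpdir.chdir()            # every test starts in its own fresh tmpdir
def test_writes_locally():    # no fixture argument needed
    with open("out.txt", "w") as f:
        f.write("data")       # lands in the per-test tmpdir, not the repo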
|
69de2261c30a8bab1ac4d0749cf32baec49e0cc4
|
webapp/byceps/blueprints/board/views.py
|
webapp/byceps/blueprints/board/views.py
|
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
|
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
Throw 404 if category/topic with given id is not found.
|
Throw 404 if category/topic with given id is not found.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
- category = Category.query.get(id)
+ category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
- topic = Topic.query.get(id)
+ topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
Throw 404 if category/topic with given id is not found.
|
## Code Before:
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get(id)
return {'topic': topic}
## Instruction:
Throw 404 if category/topic with given id is not found.
## Code After:
from ...util.framework import create_blueprint
from ...util.templating import templated
from ..authorization.registry import permission_registry
from .authorization import BoardPostingPermission, BoardTopicPermission
from .models import Category, Topic
blueprint = create_blueprint('board', __name__)
permission_registry.register_enum('board_topic', BoardTopicPermission)
permission_registry.register_enum('board_posting', BoardPostingPermission)
@blueprint.route('/categories')
@templated
def category_index():
"""List categories."""
categories = Category.query.for_current_brand().all()
return {'categories': categories}
@blueprint.route('/categories/<id>')
@templated
def category_view(id):
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
@blueprint.route('/topics/<id>')
@templated
def topic_view(id):
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
|
// ... existing code ...
"""List latest topics in the category."""
category = Category.query.get_or_404(id)
return {'category': category}
// ... modified code ...
"""List postings for the topic."""
topic = Topic.query.get_or_404(id)
return {'topic': topic}
// ... rest of the code ...
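The behavioural contrast, in brief (standard Flask-SQLAlchemy semantics; unknown_id is a placeholder):
category = Category.query.get(unknown_id)         # returns None; the template blows up later
category = Category.query.get_or_404(unknown_id)  # aborts the request with HTTP 404 at once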
|
d70ccd856bb4ddb061ff608716ef15f778380d62
|
gnsq/stream/defalte.py
|
gnsq/stream/defalte.py
|
from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
self._decompressor = zlib.decompressobj(level)
self._compressor = zlib.compressobj(level)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
|
from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
wbits = -zlib.MAX_WBITS
self._decompressor = zlib.decompressobj(wbits)
self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
|
Set correct wbits for deflate.
|
Set correct wbits for deflate.
|
Python
|
bsd-3-clause
|
wtolson/gnsq,hiringsolved/gnsq,wtolson/gnsq
|
from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
+ wbits = -zlib.MAX_WBITS
- self._decompressor = zlib.decompressobj(level)
+ self._decompressor = zlib.decompressobj(wbits)
- self._compressor = zlib.compressobj(level)
+ self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
|
Set correct wbits for deflate.
|
## Code Before:
from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
self._decompressor = zlib.decompressobj(level)
self._compressor = zlib.compressobj(level)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
## Instruction:
Set correct wbits for deflate.
## Code After:
from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
wbits = -zlib.MAX_WBITS
self._decompressor = zlib.decompressobj(wbits)
self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
|
// ... existing code ...
def __init__(self, socket, level):
wbits = -zlib.MAX_WBITS
self._decompressor = zlib.decompressobj(wbits)
self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
super(DefalteSocket, self).__init__(socket)
// ... rest of the code ...
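Why the sign of wbits matters, as a standalone stdlib sketch: a negative window size tells zlib to emit and expect a raw DEFLATE stream with no zlib header or checksum.
import zlib
wbits = -zlib.MAX_WBITS
compressor = zlib.compressobj(6, zlib.DEFLATED, wbits)
decompressor = zlib.decompressobj(wbits)
payload = b"hello nsq" * 10
raw = compressor.compress(payload) + compressor.flush(zlib.Z_SYNC_FLUSH)
assert decompressor.decompress(raw) == payload  # raw deflate round-trips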
|
eaa13f9005a8aaf8c748a98de697b03eee9e675b
|
salt/client/netapi.py
|
salt/client/netapi.py
|
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
|
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
if not len(self.netapi):
logger.error("Did not find any netapi configurations, nothing to start")
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
|
Add log error if we run salt-api w/ no config
|
Add log error if we run salt-api w/ no config
Currently, the salt-api script will exit with no error or hint of why it
failed if there is no netapi module configured. Added a short line if
we find no api modules to start, warning the user that the config may be
missing.
Fixes #28240
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
+ if not len(self.netapi):
+ logger.error("Did not find any netapi configurations, nothing to start")
+
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
|
Add log error if we run salt-api w/ no config
|
## Code Before:
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
## Instruction:
Add log error if we run salt-api w/ no config
## Code After:
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
if not len(self.netapi):
logger.error("Did not find any netapi configurations, nothing to start")
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
|
...
'''
if not len(self.netapi):
logger.error("Did not find any netapi configurations, nothing to start")
for fun in self.netapi:
...
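The failure mode the guard addresses, reduced to its essentials (a sketch; the empty dict stands in for what salt.loader.netapi returns when no netapi module such as rest_cherrypy is configured):
netapi = {}                # no netapi modules configured
if not len(netapi):        # the added guard makes the silence explicit
    print("Did not find any netapi configurations, nothing to start")
for fun in netapi:         # with an empty mapping this body never runs...
    pass                   # ...which is why the old code exited without a word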
|
888584a49e697551c4f680cc8651be2fe80fc65d
|
configgen/generators/ppsspp/ppssppGenerator.py
|
configgen/generators/ppsspp/ppssppGenerator.py
|
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
# Write emu.cfg to map joysticks, init with the default emu.cfg
Config = ConfigParser.ConfigParser()
Config.read(recalboxFiles.reicastConfigInit)
section = "input"
# For each pad detected
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
# the command to run
#~ commandArray = [recalboxFiles.ppssppBin, rom, "--escape-exit"]
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
|
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
break
# the command to run
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
# The next line is a reminder on how to quit PPSSPP with just the HK
#commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
|
Remove a bad typo from reicast
|
Remove a bad typo from reicast
|
Python
|
mit
|
nadenislamarre/recalbox-configgen,recalbox/recalbox-configgen,digitalLumberjack/recalbox-configgen
|
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
- # Write emu.cfg to map joysticks, init with the default emu.cfg
- Config = ConfigParser.ConfigParser()
- Config.read(recalboxFiles.reicastConfigInit)
- section = "input"
- # For each pad detected
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
+ break
# the command to run
- #~ commandArray = [recalboxFiles.ppssppBin, rom, "--escape-exit"]
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
+ # The next line is a reminder on how to quit PPSSPP with just the HK
+ #commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
|
Remove a bad typo from reicast
|
## Code Before:
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
# Write emu.cfg to map joysticks, init with the default emu.cfg
Config = ConfigParser.ConfigParser()
Config.read(recalboxFiles.reicastConfigInit)
section = "input"
# For each pad detected
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
# the command to run
#~ commandArray = [recalboxFiles.ppssppBin, rom, "--escape-exit"]
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
## Instruction:
Remove a bad typo from reicast
## Code After:
import Command
#~ import reicastControllers
import recalboxFiles
from generators.Generator import Generator
import ppssppControllers
import shutil
import os.path
import ConfigParser
class PPSSPPGenerator(Generator):
# Main entry of the module
# Configure fba and return a command
def generate(self, system, rom, playersControllers):
if not system.config['configfile']:
for index in playersControllers :
controller = playersControllers[index]
# we only care about player 1
if controller.player != "1":
continue
ppssppControllers.generateControllerConfig(controller)
break
# the command to run
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
# The next line is a reminder on how to quit PPSSPP with just the HK
#commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
|
...
if not system.config['configfile']:
for index in playersControllers :
...
ppssppControllers.generateControllerConfig(controller)
break
...
# the command to run
commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom]
# The next line is a reminder on how to quit PPSSPP with just the HK
#commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
...
|
10379c2210b39d507af61530c56c1dbfa8cf5307
|
pbxplore/demo/__init__.py
|
pbxplore/demo/__init__.py
|
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
"""
return os.listdir(DEMO_DATA_PATH)
|
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
File names starting with _ or . are not listed. This allows omitting
__init__.py and hidden files.
"""
return [demo_file for demo_file in os.listdir(DEMO_DATA_PATH)
if not demo_file[0] in '_.']
def list_demo_files_absolute():
"""
List the absolute path to the bundled demo files
File names starting with _ or . are not listed. This allows omitting
__init__.py and hidden files.
"""
return [os.path.join(DEMO_DATA_PATH, demo_file) for demo_file
in list_demo_files()]
|
Add a function to list absolute path to demo files
|
Add a function to list absolute path to demo files
|
Python
|
mit
|
jbarnoud/PBxplore,jbarnoud/PBxplore,pierrepo/PBxplore,HubLot/PBxplore,pierrepo/PBxplore,HubLot/PBxplore
|
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
+
+ File names starting with _ or . are not listed. This allows omitting
+ __init__.py and hidden files.
"""
- return os.listdir(DEMO_DATA_PATH)
+ return [demo_file for demo_file in os.listdir(DEMO_DATA_PATH)
+ if not demo_file[0] in '_.']
+ def list_demo_files_absolute():
+ """
+ List the absolute path to the bundled demo files
+
+ File names starting with _ or . are not listed. This allows omitting
+ __init__.py and hidden files.
+ """
+ return [os.path.join(DEMO_DATA_PATH, demo_file) for demo_file
+ in list_demo_files()]
+
|
Add a function to list absolute path to demo files
|
## Code Before:
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
"""
return os.listdir(DEMO_DATA_PATH)
## Instruction:
Add a function to list absolute path to demo files
## Code After:
import os
DEMO_DATA_PATH=os.path.abspath(os.path.dirname(__file__))
def list_demo_files():
"""
List the names of the bundled demo files
File names starting with _ or . are not listed. This allows omitting
__init__.py and hidden files.
"""
return [demo_file for demo_file in os.listdir(DEMO_DATA_PATH)
if not demo_file[0] in '_.']
def list_demo_files_absolute():
"""
List the absolute path to the bundled demo files
File names starting with _ or . are not listed. This allows omitting
__init__.py and hidden files.
"""
return [os.path.join(DEMO_DATA_PATH, demo_file) for demo_file
in list_demo_files()]
|
# ... existing code ...
List the names of the bundled demo files
File names starting with _ or . are not listed. This allows omitting
__init__.py and hidden files.
"""
return [demo_file for demo_file in os.listdir(DEMO_DATA_PATH)
if not demo_file[0] in '_.']
def list_demo_files_absolute():
"""
List the absolute path to the bundled demo files
File names starting with _ or . are not listed. This allows omitting
__init__.py and hidden files.
"""
return [os.path.join(DEMO_DATA_PATH, demo_file) for demo_file
in list_demo_files()]
# ... rest of the code ...
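A usage sketch pairing the two helpers (illustrative only; the second list is built from the first, so the ordering matches):
from pbxplore.demo import list_demo_files, list_demo_files_absolute
for name, path in zip(list_demo_files(), list_demo_files_absolute()):
    print(name, '->', path)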
|
d59f3259875ffac49668ffb3ce34ca511385ebb7
|
rated/settings.py
|
rated/settings.py
|
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
|
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
USE_X_FORWARDED_FOR = getattr(settings, 'USE_X_FORWARDED_FOR', False)
|
Fix USE_X_FORWARDED_FOR for proxied environments
|
Fix USE_X_FORWARDED_FOR for proxied environments
The settings for USE_X_FORWARDED_FOR were not being respected because the settings were not being passed through.
|
Python
|
bsd-3-clause
|
funkybob/django-rated
|
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
+ USE_X_FORWARDED_FOR = getattr(settings, 'USE_X_FORWARDED_FOR', False)
|
Fix USE_X_FORWARDED_FOR for proxied environments
|
## Code Before:
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
## Instruction:
Fix USE_X_FORWARDED_FOR for proxied environments
## Code After:
from django.conf import settings
DEFAULT_REALM = getattr(settings, 'RATED_DEFAULT_REALM', 'default')
DEFAULT_LIMIT = getattr(settings, 'RATED_DEFAULT_LIMIT', 100)
DEFAULT_DURATION = getattr(settings, 'RATED_DEFAULT_DURATION', 60 * 60)
RESPONSE_CODE = getattr(settings, 'RATED_RESPONSE_CODE', 429)
RESPONSE_MESSAGE = getattr(settings, 'RATED_RESPONSE_MESSAGE', '')
DEFAULT_WHITELIST = getattr(settings, 'RATED_DEFAULT_WHITELIST', [])
REALMS = getattr(settings, 'RATED_REALMS', {})
REALM_MAP = getattr(settings, 'RATED_REALM_MAP', {})
# Redis config parameters
REDIS = getattr(settings, 'RATED_REDIS', {})
USE_X_FORWARDED_FOR = getattr(settings, 'USE_X_FORWARDED_FOR', False)
|
// ... existing code ...
USE_X_FORWARDED_FOR = getattr(settings, 'USE_X_FORWARDED_FOR', False)
// ... rest of the code ...
|
77a5ecc7c406e4a6acf814a2f0381dc605e0d14c
|
leds/led_dance.py
|
leds/led_dance.py
|
import pyb
def led_dance(delay):
dots = {}
control = pyb.Switch(1)
while True:
if not control.value():
dots[pyb.millis() % 25] = 16
for d in dots:
pyb.pixel(d, dots[d])
if dots[d] == 0:
del(dots[d])
else:
dots[d] = int(dots[d]/2)
pyb.delay(delay)
led_dance(101)
|
import microbit
def led_dance(delay):
dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
microbit.display.set_display_mode(1)
while True:
dots[microbit.random(5)][microbit.random(5)] = 128
for i in range(5):
for j in range(5):
microbit.display.image.set_pixel_value(i, j, dots[i][j])
dots[i][j] = int(dots[i][j]/2)
microbit.sleep(delay)
led_dance(100)
|
Update for new version of micropython for microbit
|
Update for new version of micropython for microbit
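The fade in both versions comes from halving every brightness value on each tick; the decay can be tried standalone in plain Python (illustrative only, no micro:bit required):

import random

dots = [[0] * 5 for _ in range(5)]   # 5x5 brightness grid like the display
for tick in range(10):
    dots[random.randrange(5)][random.randrange(5)] = 128   # light a random pixel
    for row in dots:
        for j in range(5):
            row[j] //= 2   # halving each tick gives an exponential fade-out
    print(dots)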
|
Python
|
mit
|
jrmhaig/microbit_playground
|
- import pyb
+ import microbit
def led_dance(delay):
- dots = {}
- control = pyb.Switch(1)
+ dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
+ microbit.display.set_display_mode(1)
while True:
+ dots[microbit.random(5)][microbit.random(5)] = 128
+ for i in range(5):
+ for j in range(5):
+ microbit.display.image.set_pixel_value(i, j, dots[i][j])
- if not control.value():
- dots[pyb.millis() % 25] = 16
- for d in dots:
- pyb.pixel(d, dots[d])
- if dots[d] == 0:
- del(dots[d])
- else:
- dots[d] = int(dots[d]/2)
+ dots[i][j] = int(dots[i][j]/2)
- pyb.delay(delay)
+ microbit.sleep(delay)
- led_dance(101)
+ led_dance(100)
|
Update for new version of micropython for microbit
|
## Code Before:
import pyb
def led_dance(delay):
dots = {}
control = pyb.Switch(1)
while True:
if not control.value():
dots[pyb.millis() % 25] = 16
for d in dots:
pyb.pixel(d, dots[d])
if dots[d] == 0:
del(dots[d])
else:
dots[d] = int(dots[d]/2)
pyb.delay(delay)
led_dance(101)
## Instruction:
Update for new version of micropython for microbit
## Code After:
import microbit
def led_dance(delay):
dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
microbit.display.set_display_mode(1)
while True:
dots[microbit.random(5)][microbit.random(5)] = 128
for i in range(5):
for j in range(5):
microbit.display.image.set_pixel_value(i, j, dots[i][j])
dots[i][j] = int(dots[i][j]/2)
microbit.sleep(delay)
led_dance(100)
|
...
import microbit
...
def led_dance(delay):
dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
microbit.display.set_display_mode(1)
while True:
dots[microbit.random(5)][microbit.random(5)] = 128
for i in range(5):
for j in range(5):
microbit.display.image.set_pixel_value(i, j, dots[i][j])
dots[i][j] = int(dots[i][j]/2)
microbit.sleep(delay)
led_dance(100)
...
|
606b2b6c84e9f9f67606a4d7e521cf4805855a98
|
migrations/versions/0311_populate_returned_letters.py
|
migrations/versions/0311_populate_returned_letters.py
|
from alembic import op
from app.dao.returned_letters_dao import insert_or_update_returned_letters
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
select id, service_id, reference
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
results = conn.execute(sql)
returned_letters = results.fetchall()
references = [x.reference for x in returned_letters]
insert_or_update_returned_letters(references)
def downgrade():
pass
|
from alembic import op
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
select id, service_id, reference, updated_at
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
insert_sql = """
insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
"""
results = conn.execute(sql)
returned_letters = results.fetchall()
for x in returned_letters:
f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
conn.execute(f)
def downgrade():
pass
|
Change the insert to use updated_at as the reported_at date
|
Change the insert to use updated_at as the reported_at date
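Formatting values into the SQL with str.format is tolerable here only because every value comes from the database itself; the parameterised form is the safer general pattern. A hypothetical equivalent of the insert loop, assuming the conn and returned_letters objects from the migration:

insert_sql = """
    insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
    values(uuid_in(md5(random()::text)::cstring), %s, %s, %s, now(), null)
"""
for x in returned_letters:
    # Let the driver bind the values instead of formatting them into the SQL.
    conn.execute(insert_sql, (x.updated_at.date(), x.service_id, x.id))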
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from alembic import op
-
- from app.dao.returned_letters_dao import insert_or_update_returned_letters
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
- select id, service_id, reference
+ select id, service_id, reference, updated_at
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
+ insert_sql = """
+ insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
+ values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
+ """
+
results = conn.execute(sql)
returned_letters = results.fetchall()
- references = [x.reference for x in returned_letters]
- insert_or_update_returned_letters(references)
+ for x in returned_letters:
+ f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
+ conn.execute(f)
def downgrade():
pass
|
Change the insert to use updated_at as the reported_at date
|
## Code Before:
from alembic import op
from app.dao.returned_letters_dao import insert_or_update_returned_letters
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
select id, service_id, reference
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
results = conn.execute(sql)
returned_letters = results.fetchall()
references = [x.reference for x in returned_letters]
insert_or_update_returned_letters(references)
def downgrade():
pass
## Instruction:
Change the insert to use updated_at as the reported_at date
## Code After:
from alembic import op
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
select id, service_id, reference, updated_at
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
insert_sql = """
insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
"""
results = conn.execute(sql)
returned_letters = results.fetchall()
for x in returned_letters:
f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
conn.execute(f)
def downgrade():
pass
|
...
from alembic import op
...
sql = """
select id, service_id, reference, updated_at
from notification_history
...
and notification_status = 'returned-letter'"""
insert_sql = """
insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
"""
results = conn.execute(sql)
...
returned_letters = results.fetchall()
for x in returned_letters:
f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
conn.execute(f)
...
|
d02e021a68333c52adff38cc869bf217deebfc5c
|
run.py
|
run.py
|
import pygame
from constants import *
from music_maker import *
from tkinter import Tk
def main():
# initialize game engine
pygame.init()
# set screen width/height and caption
screen = pygame.display.set_mode(SCREEN_DIM, pygame.RESIZABLE)
pygame.display.set_caption('Some digital instrument thing')
root = Tk()
root.withdraw() # won't need this
while get_font() == None:
init_font()
MusicMaker(screen)
# close the window and quit
pygame.quit()
print("Finished.")
if __name__ == '__main__':
main()
|
import pygame
from constants import *
from music_maker import *
from tkinter import Tk
def main():
# initialize game engine
pygame.init()
# We only use pygame for drawing, and mixer will hog cpu otherwise
# https://github.com/pygame/pygame/issues/331
pygame.mixer.quit()
# set screen width/height and caption
screen = pygame.display.set_mode(SCREEN_DIM, pygame.RESIZABLE)
pygame.display.set_caption('Some digital instrument thing')
root = Tk()
root.withdraw() # won't need this
while get_font() == None:
init_font()
MusicMaker(screen)
# close the window and quit
pygame.quit()
print("Finished.")
if __name__ == '__main__':
main()
|
Fix pygame mixer init hogging cpu
|
Fix pygame mixer init hogging cpu
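An alternative to initialising everything and then shutting the mixer down is to start only the subsystems actually used; a sketch under that assumption, not the project's actual fix:

import pygame

# pygame.init() starts every subsystem, including the audio mixer;
# initialising modules individually sidesteps the busy mixer thread.
pygame.display.init()
pygame.font.init()   # only needed if text is rendered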
|
Python
|
mit
|
kenanbit/loopsichord
|
import pygame
from constants import *
from music_maker import *
from tkinter import Tk
def main():
# initialize game engine
pygame.init()
+
+ # We only use pygame for drawing, and mixer will hog cpu otherwise
+ # https://github.com/pygame/pygame/issues/331
+ pygame.mixer.quit()
# set screen width/height and caption
screen = pygame.display.set_mode(SCREEN_DIM, pygame.RESIZABLE)
pygame.display.set_caption('Some digital instrument thing')
root = Tk()
root.withdraw() # won't need this
while get_font() == None:
init_font()
MusicMaker(screen)
# close the window and quit
pygame.quit()
print("Finished.")
if __name__ == '__main__':
main()
|
Fix pygame mixer init hogging cpu
|
## Code Before:
import pygame
from constants import *
from music_maker import *
from tkinter import Tk
def main():
# initialize game engine
pygame.init()
# set screen width/height and caption
screen = pygame.display.set_mode(SCREEN_DIM, pygame.RESIZABLE)
pygame.display.set_caption('Some digital instrument thing')
root = Tk()
root.withdraw() # won't need this
while get_font() == None:
init_font()
MusicMaker(screen)
# close the window and quit
pygame.quit()
print("Finished.")
if __name__ == '__main__':
main()
## Instruction:
Fix pygame mixer init hogging cpu
## Code After:
import pygame
from constants import *
from music_maker import *
from tkinter import Tk
def main():
# initialize game engine
pygame.init()
# We only use pygame for drawing, and mixer will hog cpu otherwise
# https://github.com/pygame/pygame/issues/331
pygame.mixer.quit()
# set screen width/height and caption
screen = pygame.display.set_mode(SCREEN_DIM, pygame.RESIZABLE)
pygame.display.set_caption('Some digital instrument thing')
root = Tk()
root.withdraw() # won't need this
while get_font() == None:
init_font()
MusicMaker(screen)
# close the window and quit
pygame.quit()
print("Finished.")
if __name__ == '__main__':
main()
|
# ... existing code ...
pygame.init()
# We only use pygame for drawing, and mixer will hog cpu otherwise
# https://github.com/pygame/pygame/issues/331
pygame.mixer.quit()
# ... rest of the code ...
|
d600fc56127f234a7a14b4a89be14b5c31b072e7
|
examples/edge_test.py
|
examples/edge_test.py
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
Update the Edge example test
|
Update the Edge example test
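The rewritten selectors lean on CSS attribute substring matching ([attr*=value]), which survives exact filename changes. The behaviour can be checked outside the browser with BeautifulSoup (requires beautifulsoup4; illustrative only):

from bs4 import BeautifulSoup

html = '<img srcset="edge_logo.png 1x, edge_logo_2x.png 2x">'
soup = BeautifulSoup(html, 'html.parser')
# *= matches the substring anywhere in the attribute value, so the
# selector keeps working if the logo file name changes.
print(soup.select('img[srcset*="logo"]'))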
|
Python
|
mit
|
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
|
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
+ self.highlight('div[role="main"]')
- self.assert_element('img[alt="Edge logo"] + span')
- self.highlight('#section_about div + div')
- self.highlight('#section_about div + div > div')
- self.highlight('img[alt="Edge logo"]')
+ self.highlight('img[srcset*="logo"]')
- self.highlight('img[alt="Edge logo"] + span')
- self.highlight('#section_about div + div > div + div')
- self.highlight('#section_about div + div > div + div + div > div')
+ self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
+ self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
+ self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
+ self.highlight('span[aria-live="assertive"]')
+ self.highlight('a[href*="chromium"]')
|
Update the Edge example test
|
## Code Before:
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
## Instruction:
Update the Edge example test
## Code After:
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
# ... existing code ...
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
# ... rest of the code ...
|
a2142fb8a592a9ad9b4870d4685ec02cfa621a77
|
tests/settings.py
|
tests/settings.py
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
)
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
try:
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
except AttributeError:
# Python 3
trusted_root_data = urllib.request.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
|
Fix cert retrieval on Python 3
|
Fix cert retrieval on Python 3
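An import-time fallback is the more common idiom for this Python 2/3 split than catching AttributeError at call time; a sketch, assuming the TRUSTED_ROOT_FILE constant defined above:

try:
    from urllib.request import urlretrieve   # Python 3
except ImportError:
    from urllib import urlretrieve           # Python 2

trusted_root_data = urlretrieve(
    "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
    TRUSTED_ROOT_FILE,
)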
|
Python
|
mit
|
educreations/python-iap
|
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
+ try:
- trusted_root_data = urllib.urlretrieve(
+ trusted_root_data = urllib.urlretrieve(
- "https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
+ "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
- )
+ TRUSTED_ROOT_FILE,
+ )
+ except AttributeError:
+ # Python 3
+ trusted_root_data = urllib.request.urlretrieve(
+ "https://www.apple.com/appleca/AppleIncRootCertificate.cer",
+ TRUSTED_ROOT_FILE,
+ )
|
Fix cert retrieval on Python 3
|
## Code Before:
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer", TRUSTED_ROOT_FILE
)
## Instruction:
Fix cert retrieval on Python 3
## Code After:
import os
import urllib
TRUSTED_ROOT_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "AppleIncRootCertificate.cer"
)
SECRET_KEY = "notsecr3t"
IAP_SETTINGS = {
"TRUSTED_ROOT_FILE": TRUSTED_ROOT_FILE,
"PRODUCTION_BUNDLE_ID": "com.educreations.ios.Educreations",
}
if not os.path.isfile(TRUSTED_ROOT_FILE):
try:
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
except AttributeError:
# Python 3
trusted_root_data = urllib.request.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
|
# ... existing code ...
if not os.path.isfile(TRUSTED_ROOT_FILE):
try:
trusted_root_data = urllib.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
except AttributeError:
# Python 3
trusted_root_data = urllib.request.urlretrieve(
"https://www.apple.com/appleca/AppleIncRootCertificate.cer",
TRUSTED_ROOT_FILE,
)
# ... rest of the code ...
|
48418ac0fe75bbb331878b80d9d0903dde445838
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pyticketswitch',
version='1.6.1',
author='Matt Jared',
author_email='[email protected]',
packages=[
'pyticketswitch',
'pyticketswitch.test',
'pyticketswitch.interface_objects'
],
license='LICENSE.txt',
description='A Python interface for the Ingresso XML Core API',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
],
)
|
from distutils.core import setup
setup(
name='pyticketswitch',
version='1.6.1',
author='Ingresso',
author_email='[email protected]',
packages=[
'pyticketswitch',
'pyticketswitch.test',
'pyticketswitch.interface_objects'
],
license='LICENSE.txt',
description='A Python interface for the Ingresso XML Core API',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
],
)
|
Update author and email address
|
Update author and email address
|
Python
|
mit
|
ingresso-group/pyticketswitch,ingtechteam/pyticketswitch,graingert/pyticketswitch
|
from distutils.core import setup
setup(
name='pyticketswitch',
version='1.6.1',
- author='Matt Jared',
+ author='Ingresso',
- author_email='[email protected]',
+ author_email='[email protected]',
packages=[
'pyticketswitch',
'pyticketswitch.test',
'pyticketswitch.interface_objects'
],
license='LICENSE.txt',
description='A Python interface for the Ingresso XML Core API',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
],
)
|
Update author and email address
|
## Code Before:
from distutils.core import setup
setup(
name='pyticketswitch',
version='1.6.1',
author='Matt Jared',
author_email='[email protected]',
packages=[
'pyticketswitch',
'pyticketswitch.test',
'pyticketswitch.interface_objects'
],
license='LICENSE.txt',
description='A Python interface for the Ingresso XML Core API',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
],
)
## Instruction:
Update author and email address
## Code After:
from distutils.core import setup
setup(
name='pyticketswitch',
version='1.6.1',
author='Ingresso',
author_email='[email protected]',
packages=[
'pyticketswitch',
'pyticketswitch.test',
'pyticketswitch.interface_objects'
],
license='LICENSE.txt',
description='A Python interface for the Ingresso XML Core API',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 2.7',
],
)
|
// ... existing code ...
version='1.6.1',
author='Ingresso',
author_email='[email protected]',
packages=[
// ... rest of the code ...
|
e3d3893bf4cb8aa782efb05771339a0d59451fe9
|
xbrowse_server/base/management/commands/list_projects.py
|
xbrowse_server/base/management/commands/list_projects.py
|
from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project
class Command(BaseCommand):
"""Command to generate a ped file for a given project"""
def handle(self, *args, **options):
projects = Project.objects.all()
for project in projects:
individuals = project.get_individuals()
print("%3d families %3d individuals project id: %s" % (
len({i.get_family_id() for i in individuals} - {None,}),
len(individuals),
project.project_id))
|
from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project
class Command(BaseCommand):
"""Command to print out basic stats on some or all projects. Optionally takes a list of project_ids. """
def handle(self, *args, **options):
if args:
projects = [Project.objects.get(project_id=arg) for arg in args]
else:
projects = Project.objects.all()
for project in projects:
individuals = project.get_individuals()
print("%3d families: %s, %3d individuals, project id: %s. VCF files: %s \n %s" % (
len({i.get_family_id() for i in individuals} - {None,}),
project.family_set.all(),
len(individuals),
project.project_id,
project.get_all_vcf_files(),
project.families_by_vcf().items()
))
|
Print additional stats for each project
|
Print additional stats for each project
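Positional *args in handle() were deprecated in Django 1.8; a hypothetical equivalent declares the optional ids through add_arguments instead:

from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project

class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument('project_ids', nargs='*',
                            help='Optional project ids to report on')

    def handle(self, *args, **options):
        ids = options['project_ids']
        projects = ([Project.objects.get(project_id=i) for i in ids]
                    if ids else Project.objects.all())
        # ... stats printing as in the command above ...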
|
Python
|
agpl-3.0
|
ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/seqr,ssadedin/seqr,ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse,macarthur-lab/xbrowse,ssadedin/seqr,ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse
|
from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project
class Command(BaseCommand):
- """Command to generate a ped file for a given project"""
+ """Command to print out basic stats on some or all projects. Optionally takes a list of project_ids. """
def handle(self, *args, **options):
+ if args:
+ projects = [Project.objects.get(project_id=arg) for arg in args]
+ else:
- projects = Project.objects.all()
+ projects = Project.objects.all()
+
for project in projects:
individuals = project.get_individuals()
- print("%3d families %3d individuals project id: %s" % (
+ print("%3d families: %s, %3d individuals, project id: %s. VCF files: %s \n %s" % (
len({i.get_family_id() for i in individuals} - {None,}),
+ project.family_set.all(),
len(individuals),
- project.project_id))
+ project.project_id,
+ project.get_all_vcf_files(),
+ project.families_by_vcf().items()
+ ))
|
Print additional stats for each project
|
## Code Before:
from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project
class Command(BaseCommand):
"""Command to generate a ped file for a given project"""
def handle(self, *args, **options):
projects = Project.objects.all()
for project in projects:
individuals = project.get_individuals()
print("%3d families %3d individuals project id: %s" % (
len({i.get_family_id() for i in individuals} - {None,}),
len(individuals),
project.project_id))
## Instruction:
Print additional stats for each project
## Code After:
from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project
class Command(BaseCommand):
"""Command to print out basic stats on some or all projects. Optionally takes a list of project_ids. """
def handle(self, *args, **options):
if args:
projects = [Project.objects.get(project_id=arg) for arg in args]
else:
projects = Project.objects.all()
for project in projects:
individuals = project.get_individuals()
print("%3d families: %s, %3d individuals, project id: %s. VCF files: %s \n %s" % (
len({i.get_family_id() for i in individuals} - {None,}),
project.family_set.all(),
len(individuals),
project.project_id,
project.get_all_vcf_files(),
project.families_by_vcf().items()
))
|
# ... existing code ...
class Command(BaseCommand):
"""Command to print out basic stats on some or all projects. Optionally takes a list of project_ids. """
# ... modified code ...
def handle(self, *args, **options):
if args:
projects = [Project.objects.get(project_id=arg) for arg in args]
else:
projects = Project.objects.all()
for project in projects:
...
print("%3d families: %s, %3d individuals, project id: %s. VCF files: %s \n %s" % (
len({i.get_family_id() for i in individuals} - {None,}),
project.family_set.all(),
len(individuals),
project.project_id,
project.get_all_vcf_files(),
project.families_by_vcf().items()
))
# ... rest of the code ...
|
012ab9bf79ae2f70079534ce6ab527f8e08a50f3
|
doc/tutorials/python/secure-msg-template.py
|
doc/tutorials/python/secure-msg-template.py
|
async def init():
me = input('Who are you? ').strip()
wallet_name = '%s-wallet' % me
# 1. Create Wallet and Get Wallet Handle
try:
await wallet.create_wallet(pool_name, wallet_name, None, None, None)
except:
pass
wallet_handle = await wallet.open_wallet(wallet_name, None, None)
print('wallet = %s' % wallet_handle)
(my_did, my_vk) = await did.create_and_store_my_did(wallet_handle, "{}")
print('my_did and verkey = %s %s' % (my_did, my_vk))
their = input("Other party's DID and verkey? ").strip().split(' ')
return wallet_handle, my_did, my_vk, their[0], their[1]
|
import asyncio
import time
import re
async def prep(wallet_handle, my_vk, their_vk, msg):
print('prepping %s' % msg)
async def init():
return None, None, None, None, None
async def read(wallet_handle, my_vk):
print('reading')
async def demo():
wallet_handle, my_did, my_vk, their_did, their_vk = await init()
while True:
argv = input('> ').strip().split(' ')
cmd = argv[0].lower()
rest = ' '.join(argv[1:])
if re.match(cmd, 'prep'):
await prep(wallet_handle, my_vk, their_vk, rest)
elif re.match(cmd, 'read'):
await read(wallet_handle, my_vk)
elif re.match(cmd, 'quit'):
break
else:
print('Huh?')
if __name__ == '__main__':
try:
loop = asyncio.get_event_loop()
loop.run_until_complete(demo())
time.sleep(1) # waiting for libindy thread complete
except KeyboardInterrupt:
print('')
|
Fix template that was accidentally overwritten
|
Fix template that was accidentally overwritten
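One subtlety in the restored command loop: re.match(cmd, 'prep') uses the typed text as a regex anchored at the start, so any prefix of a command is accepted. A quick illustration:

import re

for typed in ('p', 'pr', 'prep', 'read'):
    # re.match anchors at the start of the string, so 'p' already
    # matches 'prep'; only a non-prefix such as 'read' fails.
    print(typed, bool(re.match(typed, 'prep')))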
|
Python
|
apache-2.0
|
anastasia-tarasova/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,srottem/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,srottem/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,anastasia-tarasova/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,peacekeeper/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,srottem/indy-sdk,anastasia-tarasova/indy-sdk,anastasia-tarasova/indy-sdk,Artemkaaas/indy-sdk,Artemkaaas/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,peacekeeper/indy-sdk,anastasia-tarasova/indy-sdk,srottem/indy-sdk,peacekeeper/indy-sdk,srottem/indy-sdk,Artemkaaas/indy-sdk,anastasia-tarasova/indy-sdk,anastasia-tarasova/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,srottem/indy-sdk,peacekeeper/indy-sdk,Artemkaaas/indy-sdk,srottem/indy-sdk
|
+ import asyncio
+ import time
+ import re
+
+ async def prep(wallet_handle, my_vk, their_vk, msg):
+ print('prepping %s' % msg)
+
async def init():
+ return None, None, None, None, None
- me = input('Who are you? ').strip()
- wallet_name = '%s-wallet' % me
- # 1. Create Wallet and Get Wallet Handle
+ async def read(wallet_handle, my_vk):
+ print('reading')
+
+ async def demo():
+ wallet_handle, my_did, my_vk, their_did, their_vk = await init()
+
+ while True:
+ argv = input('> ').strip().split(' ')
+ cmd = argv[0].lower()
+ rest = ' '.join(argv[1:])
+ if re.match(cmd, 'prep'):
+ await prep(wallet_handle, my_vk, their_vk, rest)
+ elif re.match(cmd, 'read'):
+ await read(wallet_handle, my_vk)
+ elif re.match(cmd, 'quit'):
+ break
+ else:
+ print('Huh?')
+
+ if __name__ == '__main__':
try:
- await wallet.create_wallet(pool_name, wallet_name, None, None, None)
- except:
- pass
- wallet_handle = await wallet.open_wallet(wallet_name, None, None)
- print('wallet = %s' % wallet_handle)
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(demo())
+ time.sleep(1) # waiting for libindy thread complete
+ except KeyboardInterrupt:
+ print('')
- (my_did, my_vk) = await did.create_and_store_my_did(wallet_handle, "{}")
- print('my_did and verkey = %s %s' % (my_did, my_vk))
-
- their = input("Other party's DID and verkey? ").strip().split(' ')
- return wallet_handle, my_did, my_vk, their[0], their[1]
-
|
Fix template that was accidentally overwritten
|
## Code Before:
async def init():
me = input('Who are you? ').strip()
wallet_name = '%s-wallet' % me
# 1. Create Wallet and Get Wallet Handle
try:
await wallet.create_wallet(pool_name, wallet_name, None, None, None)
except:
pass
wallet_handle = await wallet.open_wallet(wallet_name, None, None)
print('wallet = %s' % wallet_handle)
(my_did, my_vk) = await did.create_and_store_my_did(wallet_handle, "{}")
print('my_did and verkey = %s %s' % (my_did, my_vk))
their = input("Other party's DID and verkey? ").strip().split(' ')
return wallet_handle, my_did, my_vk, their[0], their[1]
## Instruction:
Fix template that was accidentally overwritten
## Code After:
import asyncio
import time
import re
async def prep(wallet_handle, my_vk, their_vk, msg):
print('prepping %s' % msg)
async def init():
return None, None, None, None, None
async def read(wallet_handle, my_vk):
print('reading')
async def demo():
wallet_handle, my_did, my_vk, their_did, their_vk = await init()
while True:
argv = input('> ').strip().split(' ')
cmd = argv[0].lower()
rest = ' '.join(argv[1:])
if re.match(cmd, 'prep'):
await prep(wallet_handle, my_vk, their_vk, rest)
elif re.match(cmd, 'read'):
await read(wallet_handle, my_vk)
elif re.match(cmd, 'quit'):
break
else:
print('Huh?')
if __name__ == '__main__':
try:
loop = asyncio.get_event_loop()
loop.run_until_complete(demo())
time.sleep(1) # waiting for libindy thread complete
except KeyboardInterrupt:
print('')
|
# ... existing code ...
import asyncio
import time
import re
async def prep(wallet_handle, my_vk, their_vk, msg):
print('prepping %s' % msg)
async def init():
return None, None, None, None, None
async def read(wallet_handle, my_vk):
print('reading')
async def demo():
wallet_handle, my_did, my_vk, their_did, their_vk = await init()
while True:
argv = input('> ').strip().split(' ')
cmd = argv[0].lower()
rest = ' '.join(argv[1:])
if re.match(cmd, 'prep'):
await prep(wallet_handle, my_vk, their_vk, rest)
elif re.match(cmd, 'read'):
await read(wallet_handle, my_vk)
elif re.match(cmd, 'quit'):
break
else:
print('Huh?')
if __name__ == '__main__':
try:
loop = asyncio.get_event_loop()
loop.run_until_complete(demo())
time.sleep(1) # waiting for libindy thread complete
except KeyboardInterrupt:
print('')
# ... rest of the code ...
|
a24d6a25cb7ee5101e8131a9719744f79b23c11b
|
examples/quotes/quotes.py
|
examples/quotes/quotes.py
|
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old soul",
]
def main(address_pattern=None):
my_name = input("Name: ")
my_address = nw0.advertise(my_name, address_pattern)
print("Advertising %s on %s" % (my_name, my_address))
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(address, "quote", quote)
time.sleep(0.5)
if __name__ == '__main__':
main(*sys.argv[1:])
|
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old soul",
]
def main(address_pattern=None):
my_name = input("Name: ")
my_address = nw0.advertise(my_name, address_pattern)
print("Advertising %s on %s" % (my_name, my_address))
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(my_address, "quote", quote)
time.sleep(1)
if __name__ == '__main__':
main(*sys.argv[1:])
|
Send notification to the correct address
|
Send notification to the correct address
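The fix reflects networkzero's pub/sub direction: notifications are published on the address you advertised yourself, and each peer waits on the other party's address. A sketch reusing only the calls from the example (my_address and address are assumed to be defined as in the script):

import networkzero as nw0

# Publisher side: notifications go out on *your* advertised address.
nw0.send_notification(my_address, "quote", "Hickory Dickory Dock")

# Subscriber side: you listen on the *other* party's address.
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)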
|
Python
|
mit
|
tjguk/networkzero,tjguk/networkzero,tjguk/networkzero
|
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old soul",
]
def main(address_pattern=None):
my_name = input("Name: ")
my_address = nw0.advertise(my_name, address_pattern)
print("Advertising %s on %s" % (my_name, my_address))
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
-
+
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
- quote = random.choice(quotes)
- nw0.send_notification(address, "quote", quote)
+ quote = random.choice(quotes)
+ nw0.send_notification(my_address, "quote", quote)
+
- time.sleep(0.5)
+ time.sleep(1)
if __name__ == '__main__':
main(*sys.argv[1:])
|
Send notification to the correct address
|
## Code Before:
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old soul",
]
def main(address_pattern=None):
my_name = input("Name: ")
my_address = nw0.advertise(my_name, address_pattern)
print("Advertising %s on %s" % (my_name, my_address))
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(address, "quote", quote)
time.sleep(0.5)
if __name__ == '__main__':
main(*sys.argv[1:])
## Instruction:
Send notification to the correct address
## Code After:
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old soul",
]
def main(address_pattern=None):
my_name = input("Name: ")
my_address = nw0.advertise(my_name, address_pattern)
print("Advertising %s on %s" % (my_name, my_address))
while True:
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
if topic:
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(my_address, "quote", quote)
time.sleep(1)
if __name__ == '__main__':
main(*sys.argv[1:])
|
...
services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
for name, address in services:
...
print("%s says: %s" % (name, message))
quote = random.choice(quotes)
nw0.send_notification(my_address, "quote", quote)
time.sleep(1)
...
|
21cb063ec63792ddeb45a62570a8565c69f2091b
|
tests/functional/test_product.py
|
tests/functional/test_product.py
|
from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
def test_product_navigation(self):
# Create a product
product = ProductFactory.create()
# Get the product detail page
self.browser.get(self.live_server_url + product.get_absolute_url())
# Assert that the title is as expected
self.assertIn(product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
# Create a sample product
product1 = ProductFactory.create()
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
self.browser.find_element_by_link_text(product1.name).click()
# Assert that the page is the one expected
self.assertIn(product1.name, self.browser.title)
|
from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
def setUp(self):
super(ProductTest, self).setUp()
# Create a product
self.product = ProductFactory.create()
def test_product_navigation(self):
# Get the product detail page
self.browser.get(self.live_server_url + self.product.get_absolute_url())
# Assert that the title is as expected
self.assertIn(self.product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
self.browser.find_element_by_link_text(self.product.name).click()
# Assert that the page is the one expected
self.assertIn(self.product.name, self.browser.title)
|
Create product in setUp() method for DRY
|
Create product in setUp() method for DRY
Create the product to be reused by the test methods in the setUp method,
which runs before each test method.
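The per-test lifecycle this relies on can be seen in isolation with plain unittest (a standalone illustration):

import unittest

class LifecycleDemo(unittest.TestCase):
    def setUp(self):
        # Runs before *each* test method, so every test starts fresh.
        self.items = []

    def test_one(self):
        self.items.append(1)
        self.assertEqual(self.items, [1])

    def test_two(self):
        # setUp recreated self.items; nothing leaks over from test_one.
        self.assertEqual(self.items, [])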
|
Python
|
bsd-3-clause
|
kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop
|
from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
+ def setUp(self):
+ super(ProductTest, self).setUp()
+ # Create a product
+ self.product = ProductFactory.create()
+
def test_product_navigation(self):
- # Create a product
- product = ProductFactory.create()
-
# Get the product detail page
- self.browser.get(self.live_server_url + product.get_absolute_url())
+ self.browser.get(self.live_server_url + self.product.get_absolute_url())
# Assert that the title is as expected
- self.assertIn(product.name, self.browser.title)
+ self.assertIn(self.product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
- # Create a sample product
- product1 = ProductFactory.create()
-
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
- self.browser.find_element_by_link_text(product1.name).click()
+ self.browser.find_element_by_link_text(self.product.name).click()
# Assert that the page is the one expected
- self.assertIn(product1.name, self.browser.title)
+ self.assertIn(self.product.name, self.browser.title)
|
Create product in setUp() method for DRY
|
## Code Before:
from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
def test_product_navigation(self):
# Create a product
product = ProductFactory.create()
# Get the product detail page
self.browser.get(self.live_server_url + product.get_absolute_url())
# Assert that the title is as expected
self.assertIn(product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
# Create a sample product
product1 = ProductFactory.create()
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
self.browser.find_element_by_link_text(product1.name).click()
# Assert that the page is the one expected
self.assertIn(product1.name, self.browser.title)
## Instruction:
Create product in setUp() method for DRY
## Code After:
from .base import FunctionalTest
from store.tests.factories import *
class ProductTest(FunctionalTest):
def setUp(self):
super(ProductTest, self).setUp()
# Create a product
self.product = ProductFactory.create()
def test_product_navigation(self):
# Get the product detail page
self.browser.get(self.live_server_url + self.product.get_absolute_url())
# Assert that the title is as expected
self.assertIn(self.product.name, self.browser.title)
def test_product_navigation_from_homepage(self):
# Get the homepage
self.browser.get(self.live_server_url)
# Navigate to the Product Page
self.browser.find_element_by_link_text(self.product.name).click()
# Assert that the page is the one expected
self.assertIn(self.product.name, self.browser.title)
|
// ... existing code ...
def setUp(self):
super(ProductTest, self).setUp()
# Create a product
self.product = ProductFactory.create()
def test_product_navigation(self):
# Get the product detail page
self.browser.get(self.live_server_url + self.product.get_absolute_url())
// ... modified code ...
# Assert that the title is as expected
self.assertIn(self.product.name, self.browser.title)
...
def test_product_navigation_from_homepage(self):
# Get the homepage
...
# Navigate to the Product Page
self.browser.find_element_by_link_text(self.product.name).click()
...
# Assert that the page is the one expected
self.assertIn(self.product.name, self.browser.title)
// ... rest of the code ...
|
b6d61fef0fe372c7149fa52e2ab1acff144d0118
|
tests/fixtures/dummy/facilities.py
|
tests/fixtures/dummy/facilities.py
|
from fixture import DataSet
from .address import AddressData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
from fixture import DataSet
from .address import AddressData
from .finance import AccountData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
fee_account = AccountData.dummy_revenue
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
fee_account = AccountData.dummy_revenue
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
Add fee_account to BuildingData of legacy test base
|
Add fee_account to BuildingData of legacy test base
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
from fixture import DataSet
from .address import AddressData
+ from .finance import AccountData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
+ fee_account = AccountData.dummy_revenue
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
+ fee_account = AccountData.dummy_revenue
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
Add fee_account to BuildingData of legacy test base
|
## Code Before:
from fixture import DataSet
from .address import AddressData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
## Instruction:
Add fee_account to BuildingData of legacy test base
## Code After:
from fixture import DataSet
from .address import AddressData
from .finance import AccountData
class SiteData(DataSet):
class dummy:
name = "dummy"
class BuildingData(DataSet):
class dummy_house1:
site = SiteData.dummy
street = "dummy"
number = "01"
short_name = "abc"
fee_account = AccountData.dummy_revenue
class dummy_house2:
site = SiteData.dummy
street = "dummy"
number = "02"
short_name = "def"
fee_account = AccountData.dummy_revenue
class RoomData(DataSet):
class dummy_room1:
number = "1"
level = 1
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address1
class dummy_room2:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house2
address = AddressData.dummy_address2
class dummy_room3:
number = "2"
level = 2
inhabitable = True
building = BuildingData.dummy_house1
address = AddressData.dummy_address3
class dummy_room4(dummy_room1):
number = "2"
address = AddressData.dummy_address4
class dummy_room5(dummy_room1):
number = "2"
address = AddressData.dummy_address5
|
...
from .address import AddressData
from .finance import AccountData
...
short_name = "abc"
fee_account = AccountData.dummy_revenue
...
short_name = "def"
fee_account = AccountData.dummy_revenue
...
|
610fb9c9ac6a225df10ec770d44564e61af53ce0
|
polling_stations/apps/addressbase/management/commands/import_cleaned_addresses.py
|
polling_stations/apps/addressbase/management/commands/import_cleaned_addresses.py
|
import os
import glob
from django.apps import apps
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Turn off auto system check for all apps
We will manually run system checks only for the
'addressbase' and 'pollingstations' apps
"""
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
def handle(self, *args, **kwargs):
"""
Manually run system checks for the
'addressbase' and 'pollingstations' apps
Management commands can ignore checks that only apply to
the apps supporting the website part of the project
"""
self.check([
apps.get_app_config('addressbase'),
apps.get_app_config('pollingstations')
])
glob_str = os.path.join(
kwargs['cleaned_ab_path'],
"*_cleaned.csv"
)
for cleaned_file_path in glob.glob(glob_str):
print(cleaned_file_path)
cursor = connection.cursor()
cursor.execute("""
COPY addressbase_address (UPRN,address,postcode,location)
FROM '{}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(cleaned_file_path))
|
import os
import glob
from django.apps import apps
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Turn off auto system check for all apps
We will maunally run system checks only for the
'addressbase' and 'pollingstations' apps
"""
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
def handle(self, *args, **kwargs):
"""
Manually run system checks for the
'addressbase' and 'pollingstations' apps
Management commands can ignore checks that only apply to
the apps supporting the website part of the project
"""
self.check([
apps.get_app_config('addressbase'),
apps.get_app_config('pollingstations')
])
glob_str = os.path.join(
kwargs['cleaned_ab_path'],
"*_cleaned.csv"
)
for cleaned_file_path in glob.glob(glob_str):
cleaned_file_path = os.path.abspath(cleaned_file_path)
print(cleaned_file_path)
cursor = connection.cursor()
cursor.execute("""
COPY addressbase_address (UPRN,address,postcode,location)
FROM '{}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(cleaned_file_path))
|
Use abspath for COPY command
|
Use abspath for COPY command
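The absolute path matters because COPY FROM '<file>' is resolved by the Postgres server process, not the client. When the CSV lives client-side, psycopg2's copy_expert with COPY FROM STDIN is the usual alternative; a hedged sketch, assuming the cursor and cleaned_file_path from the command:

with open(cleaned_file_path) as f:
    # COPY ... FROM STDIN streams the client-side file over the
    # connection, so no server-visible path is needed at all.
    cursor.copy_expert(
        """COPY addressbase_address (UPRN, address, postcode, location)
           FROM STDIN (FORMAT CSV, DELIMITER ',', QUOTE '"')""",
        f,
    )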
|
Python
|
bsd-3-clause
|
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
import os
import glob
from django.apps import apps
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Turn off auto system check for all apps
We will manually run system checks only for the
'addressbase' and 'pollingstations' apps
"""
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
def handle(self, *args, **kwargs):
"""
Manually run system checks for the
'addressbase' and 'pollingstations' apps
Management commands can ignore checks that only apply to
the apps supporting the website part of the project
"""
self.check([
apps.get_app_config('addressbase'),
apps.get_app_config('pollingstations')
])
glob_str = os.path.join(
kwargs['cleaned_ab_path'],
"*_cleaned.csv"
)
for cleaned_file_path in glob.glob(glob_str):
+ cleaned_file_path = os.path.abspath(cleaned_file_path)
print(cleaned_file_path)
cursor = connection.cursor()
cursor.execute("""
COPY addressbase_address (UPRN,address,postcode,location)
FROM '{}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(cleaned_file_path))
|
Use abspath for COPY command
|
## Code Before:
import os
import glob
from django.apps import apps
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Turn off auto system check for all apps
We will manually run system checks only for the
'addressbase' and 'pollingstations' apps
"""
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
def handle(self, *args, **kwargs):
"""
Manually run system checks for the
'addressbase' and 'pollingstations' apps
Management commands can ignore checks that only apply to
the apps supporting the website part of the project
"""
self.check([
apps.get_app_config('addressbase'),
apps.get_app_config('pollingstations')
])
glob_str = os.path.join(
kwargs['cleaned_ab_path'],
"*_cleaned.csv"
)
for cleaned_file_path in glob.glob(glob_str):
print(cleaned_file_path)
cursor = connection.cursor()
cursor.execute("""
COPY addressbase_address (UPRN,address,postcode,location)
FROM '{}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(cleaned_file_path))
## Instruction:
Use abspath for COPY command
## Code After:
import os
import glob
from django.apps import apps
from django.db import connection
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Turn off auto system check for all apps
We will manually run system checks only for the
'addressbase' and 'pollingstations' apps
"""
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument(
'cleaned_ab_path',
help='The path to the folder containing the cleaned AddressBase CSVs'
)
def handle(self, *args, **kwargs):
"""
Manually run system checks for the
'addressbase' and 'pollingstations' apps
Management commands can ignore checks that only apply to
the apps supporting the website part of the project
"""
self.check([
apps.get_app_config('addressbase'),
apps.get_app_config('pollingstations')
])
glob_str = os.path.join(
kwargs['cleaned_ab_path'],
"*_cleaned.csv"
)
for cleaned_file_path in glob.glob(glob_str):
cleaned_file_path = os.path.abspath(cleaned_file_path)
print(cleaned_file_path)
cursor = connection.cursor()
cursor.execute("""
COPY addressbase_address (UPRN,address,postcode,location)
FROM '{}' (FORMAT CSV, DELIMITER ',', quote '"');
""".format(cleaned_file_path))
|
...
for cleaned_file_path in glob.glob(glob_str):
cleaned_file_path = os.path.abspath(cleaned_file_path)
print(cleaned_file_path)
...
|
a9b22b76203467ec63ce0592e32498cfecdedca3
|
tests/config.py
|
tests/config.py
|
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
import ConfigParser
import unittest
config = ConfigParser.ConfigParser()
config.read('config')
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = config.get('test', 'community_url')
def create_auth():
auth = AuthNonSSO(config.get('test', 'token'))
return auth
|
import sys
sys.path.append("../ideascaly")
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
import ConfigParser
import unittest
config = ConfigParser.ConfigParser()
config.read('config')
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = config.get('test', 'community_url')
def create_auth():
auth = AuthNonSSO(config.get('test', 'token'))
return auth
|
Add the directory of the project module to the system path
|
Add the directory of the project module to the system path
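The relative append only resolves correctly when the tests are launched from this directory; anchoring on __file__ is the more robust variant, sketched here:

import os
import sys

# Resolve the project root relative to this file rather than the
# current working directory, so the tests can be run from anywhere.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))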
|
Python
|
mit
|
joausaga/ideascaly
|
+ import sys
+ sys.path.append("../ideascaly")
+
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
import ConfigParser
import unittest
config = ConfigParser.ConfigParser()
config.read('config')
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = config.get('test', 'community_url')
def create_auth():
auth = AuthNonSSO(config.get('test', 'token'))
return auth
|
Add the directory of the project module to the system path
|
## Code Before:
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
import ConfigParser
import unittest
config = ConfigParser.ConfigParser()
config.read('config')
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = config.get('test', 'community_url')
def create_auth():
auth = AuthNonSSO(config.get('test', 'token'))
return auth
## Instruction:
Add the directory of the project module to system path
## Code After:
import sys
sys.path.append("../ideascaly")
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
import ConfigParser
import unittest
config = ConfigParser.ConfigParser()
config.read('config')
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = config.get('test', 'community_url')
def create_auth():
auth = AuthNonSSO(config.get('test', 'token'))
return auth
|
// ... existing code ...
import sys
sys.path.append("../ideascaly")
from ideascaly.auth import AuthNonSSO
// ... rest of the code ...
|
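
The appended "../ideascaly" entry is resolved against the current working directory, so the tests only import correctly when launched from the tests directory itself. A more location-independent variant, anchored on the test file's own path rather than the cwd, might look like this (a sketch, not part of the original change):

import os
import sys

# Resolve the project root relative to this file rather than the cwd,
# so imports succeed no matter where the test runner is invoked from.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)
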
7895b0a39694e88ed1bdd425c69fb747b7531c59
|
indico/testing/mocks.py
|
indico/testing/mocks.py
|
class MockConferenceHolder:
# This class is monkeypatched on top of the real conferenceholder
_events = {}
def __init__(self):
pass
@classmethod
def add(cls, event):
if event.id in cls._events:
__tracebackhide__ = True
raise Exception("Event '{}' already exists".format(event.id))
cls._events[event.id] = event
@classmethod
def remove(cls, event):
del cls._events[event.id]
@classmethod
def getById(cls, id_):
return cls._events.get(id_)
class MockConference(object):
def __repr__(self):
return '<MockConference({})>'.format(self.id)
def getId(self):
return self.id
|
class MockConferenceHolder:
# This class is monkeypatched on top of the real conferenceholder
_events = {}
def __init__(self):
pass
@classmethod
def add(cls, event):
if event.id in cls._events:
__tracebackhide__ = True
raise Exception("Event '{}' already exists".format(event.id))
cls._events[int(event.id)] = event
@classmethod
def remove(cls, event):
del cls._events[int(event.id)]
@classmethod
def getById(cls, id_, quiet=None):
return cls._events.get(int(id_))
class MockConference(object):
def __repr__(self):
return '<MockConference({})>'.format(self.id)
def getId(self):
return self.id
|
Fix str/int usage in MockConferenceHolder
|
Fix str/int usage in MockConferenceHolder
|
Python
|
mit
|
indico/indico,ThiefMaster/indico,indico/indico,OmeGak/indico,ThiefMaster/indico,OmeGak/indico,mic4ael/indico,DirkHoffmann/indico,mvidalgarcia/indico,mvidalgarcia/indico,OmeGak/indico,pferreir/indico,ThiefMaster/indico,mic4ael/indico,pferreir/indico,DirkHoffmann/indico,ThiefMaster/indico,OmeGak/indico,mic4ael/indico,indico/indico,mvidalgarcia/indico,pferreir/indico,mic4ael/indico,pferreir/indico,mvidalgarcia/indico,DirkHoffmann/indico,DirkHoffmann/indico,indico/indico
|
class MockConferenceHolder:
# This class is monkeypatched on top of the real conferenceholder
_events = {}
def __init__(self):
pass
@classmethod
def add(cls, event):
if event.id in cls._events:
__tracebackhide__ = True
raise Exception("Event '{}' already exists".format(event.id))
- cls._events[event.id] = event
+ cls._events[int(event.id)] = event
@classmethod
def remove(cls, event):
- del cls._events[event.id]
+ del cls._events[int(event.id)]
@classmethod
- def getById(cls, id_):
+ def getById(cls, id_, quiet=None):
- return cls._events.get(id_)
+ return cls._events.get(int(id_))
class MockConference(object):
def __repr__(self):
return '<MockConference({})>'.format(self.id)
def getId(self):
return self.id
|
Fix str/int usage in MockConferenceHolder
|
## Code Before:
class MockConferenceHolder:
# This class is monkeypatched on top of the real conferenceholder
_events = {}
def __init__(self):
pass
@classmethod
def add(cls, event):
if event.id in cls._events:
__tracebackhide__ = True
raise Exception("Event '{}' already exists".format(event.id))
cls._events[event.id] = event
@classmethod
def remove(cls, event):
del cls._events[event.id]
@classmethod
def getById(cls, id_):
return cls._events.get(id_)
class MockConference(object):
def __repr__(self):
return '<MockConference({})>'.format(self.id)
def getId(self):
return self.id
## Instruction:
Fix str/int usage in MockConferenceHolder
## Code After:
class MockConferenceHolder:
# This class is monkeypatched on top of the real conferenceholder
_events = {}
def __init__(self):
pass
@classmethod
def add(cls, event):
if event.id in cls._events:
__tracebackhide__ = True
raise Exception("Event '{}' already exists".format(event.id))
cls._events[int(event.id)] = event
@classmethod
def remove(cls, event):
del cls._events[int(event.id)]
@classmethod
def getById(cls, id_, quiet=None):
return cls._events.get(int(id_))
class MockConference(object):
def __repr__(self):
return '<MockConference({})>'.format(self.id)
def getId(self):
return self.id
|
# ... existing code ...
raise Exception("Event '{}' already exists".format(event.id))
cls._events[int(event.id)] = event
# ... modified code ...
def remove(cls, event):
del cls._events[int(event.id)]
...
@classmethod
def getById(cls, id_, quiet=None):
return cls._events.get(int(id_))
# ... rest of the code ...
|
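
The underlying failure mode in the code above is a dict keyed sometimes by '42' and sometimes by 42, which silently creates two separate entries; funnelling every id through int() restores a single canonical key type. A standalone illustration of the normalization:

_events = {}

def add(event_id, event):
    _events[int(event_id)] = event      # canonicalize str/int ids

def get_by_id(event_id):
    return _events.get(int(event_id))   # lookups normalize the same way

add('42', 'mock conference')
assert get_by_id(42) == 'mock conference'
assert get_by_id('42') == 'mock conference'
assert len(_events) == 1                # no duplicate str/int entries
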
beb06f3377a5e3e52f5756a1ecbf4197c7a3e99e
|
base/components/correlations/managers.py
|
base/components/correlations/managers.py
|
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
Remove the Membership special case. We want everything correlated.
|
Remove the Membership special case. We want everything correlated.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
- # Membership is a special case. Since most groups are static
- # (or non-generational), the date the group is formed is the same as
- # the date its members joined. So if those two values are equal, stop
- # the process.
- if not timestamp or (instance._meta.model_name == 'membership'
- and instance.started == instance.group.started):
- return
-
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
Remove the Membership special case. We want everything correlated.
|
## Code Before:
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
# Membership is a special case. Since most groups are static
# (or non-generational), the date the group is formed is the same as
# the date its members joined. So if those two values are equal, stop
# the process.
if not timestamp or (instance._meta.model_name == 'membership'
and instance.started == instance.group.started):
return
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
## Instruction:
Remove the Membership special case. We want everything correlated.
## Code After:
from datetime import date
from django.contrib.contenttypes.models import ContentType
from django.db import models
class CorrelationManager(models.Manager):
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
defaults = {
'timestamp': timestamp,
'julian': timestamp.timetuple().tm_yday,
'year': timestamp.year,
'month': timestamp.month,
'day': timestamp.day,
}
correlation, created = self.get_or_create(
content_type=ctype,
object_id=instance._get_pk_val(),
identifier=instance._meta.model_name,
date_field=attribute,
defaults=defaults
)
for key, value in defaults.iteritems():
setattr(correlation, key, value)
correlation.save()
return
def get_query_set(self):
qs = super(CorrelationManager, self).get_query_set()
return qs #.prefetch_related('content_object')
def today(self):
qs = self.get_query_set()
return qs.filter(julian=date.today().timetuple().tm_yday)
|
# ... existing code ...
def update_or_create(self, instance, timestamp, attribute):
ctype = ContentType.objects.get_for_model(instance.sender)
# ... rest of the code ...
|
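
The manager above uses the classic two-step update-or-create pattern: get_or_create with a defaults dict, then re-applying the defaults so the already-existed branch is refreshed as well (note that iteritems pins this code to Python 2). A framework-free sketch of the same idea, with a plain dict standing in for the table:

def update_or_create(store, key, defaults):
    obj = store.get(key)
    created = obj is None
    if created:
        obj = dict(defaults)    # "create" with the defaults applied
        store[key] = obj
    else:
        obj.update(defaults)    # existing row: refresh tracked fields
    return obj, created

store = {}
update_or_create(store, ('membership', 1), {'year': 2013, 'julian': 182})
obj, created = update_or_create(store, ('membership', 1),
                                {'year': 2014, 'julian': 7})
assert not created and obj['year'] == 2014
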
ce28b359122475f544b9ae3bc9e05a5bc02377e4
|
conda_kapsel/internal/py2_compat.py
|
conda_kapsel/internal/py2_compat.py
|
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
Fix unicode keys in addition to values for windows/py2 environment
|
Fix unicode keys in addition to values for windows/py2 environment
|
Python
|
bsd-3-clause
|
conda/kapsel,conda/kapsel
|
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
- assert isinstance(key, basestring) # noqa
+ if isinstance(key, unicode): # noqa
+ key = key.encode()
+ if isinstance(value, unicode): # noqa
+ value = value.encode()
assert isinstance(key, str)
- if isinstance(value, unicode): # noqa
- environ_copy[key] = value.encode()
- assert isinstance(environ_copy[key], str)
- elif not isinstance(value, str):
+ assert isinstance(value, str)
- raise TypeError("Environment contains non-unicode non-str value %r" % value)
- else:
- environ_copy[key] = value
+ environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
Fix unicode keys in addition to values for windows/py2 environment
|
## Code Before:
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
assert isinstance(key, basestring) # noqa
assert isinstance(key, str)
if isinstance(value, unicode): # noqa
environ_copy[key] = value.encode()
assert isinstance(environ_copy[key], str)
elif not isinstance(value, str):
raise TypeError("Environment contains non-unicode non-str value %r" % value)
else:
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
## Instruction:
Fix unicode keys in addition to values for windows/py2 environment
## Code After:
from __future__ import absolute_import, print_function
import platform
import sys
_PY2 = sys.version_info[0] == 2
def is_string(s):
if _PY2: # pragma: no cover (py2/py3)
return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa
else: # pragma: no cover (py2/py3)
return isinstance(s, str) # pragma: no cover (py2/py3)
def env_without_unicode(environ):
# On Windows / Python 2.7, Popen explodes if given unicode strings in the environment.
if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3)
environ_copy = dict()
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
else: # pragma: no cover (py2/py3)
return environ
|
# ... existing code ...
for key, value in environ.items():
if isinstance(key, unicode): # noqa
key = key.encode()
if isinstance(value, unicode): # noqa
value = value.encode()
assert isinstance(key, str)
assert isinstance(value, str)
environ_copy[key] = value
return environ_copy
# ... rest of the code ...
|
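
The consumer of this helper is subprocess creation: on Windows under CPython 2, Popen raises TypeError if the env mapping contains unicode keys or values, so both sides are byte-encoded first. A hedged usage sketch (Python 2 on Windows assumed; the flag name and command are illustrative):

import os
import subprocess

from conda_kapsel.internal.py2_compat import env_without_unicode

# Sanitize before spawning: every unicode key/value becomes a byte str,
# which is what Popen on Windows/py2 requires.
env = env_without_unicode(dict(os.environ, MY_FLAG=u'1'))
subprocess.Popen(['cmd', '/c', 'echo %MY_FLAG%'], env=env).wait()
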
9328069cf7c871d701d0299e8665ef60572e8061
|
fandjango/decorators.py
|
fandjango/decorators.py
|
from functools import wraps
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.conf import settings
from utils import redirect_to_facebook_authorization
def facebook_authorization_required(redirect_uri=False):
"""
Redirect Facebook canvas views to authorization if required.
Arguments:
redirect_uri -- A string describing an URI to redirect to after authorization is complete.
Defaults to current URI in Facebook canvas (ex. http://apps.facebook.com/myapp/path/).
"""
def decorator(function):
@wraps(function)
def wrapper(request, *args, **kwargs):
if not request.facebook or not request.facebook.user:
return redirect_to_facebook_authorization(
redirect_uri = redirect_uri or settings.FACEBOOK_APPLICATION_URL + request.get_full_path()
)
return function(request, *args, **kwargs)
return wrapper
return decorator
|
from functools import wraps
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.handlers.wsgi import WSGIRequest
from django.conf import settings
from utils import redirect_to_facebook_authorization
def facebook_authorization_required(redirect_uri=False):
"""
Redirect Facebook canvas views to authorization if required.
Arguments:
redirect_uri -- A string describing an URI to redirect to after authorization is complete.
Defaults to current URI in Facebook canvas (ex. http://apps.facebook.com/myapp/path/).
"""
def decorator(function):
@wraps(function)
def wrapper(*args, **kwargs):
request = [arg for arg in args if arg.__class__ is WSGIRequest][0]
if not request.facebook or not request.facebook.user:
return redirect_to_facebook_authorization(
redirect_uri = redirect_uri or settings.FACEBOOK_APPLICATION_URL + request.get_full_path()
)
return function(*args, **kwargs)
return wrapper
return decorator
|
Fix a bug that caused the "facebook_authorization_required" decorator to be incompatible with Django libraries that modify the order of arguments given to views.
|
Fix a bug that caused the "facebook_authorization_required" decorator to be incompatible
with Django libraries that modify the order of arguments given to views.
|
Python
|
mit
|
jgorset/fandjango,jgorset/fandjango
|
from functools import wraps
from django.http import HttpResponse
from django.core.urlresolvers import reverse
+ from django.core.handlers.wsgi import WSGIRequest
from django.conf import settings
from utils import redirect_to_facebook_authorization
def facebook_authorization_required(redirect_uri=False):
"""
Redirect Facebook canvas views to authorization if required.
Arguments:
redirect_uri -- A string describing an URI to redirect to after authorization is complete.
Defaults to current URI in Facebook canvas (ex. http://apps.facebook.com/myapp/path/).
"""
def decorator(function):
@wraps(function)
- def wrapper(request, *args, **kwargs):
+ def wrapper(*args, **kwargs):
+
+ request = [arg for arg in args if arg.__class__ is WSGIRequest][0]
+
if not request.facebook or not request.facebook.user:
return redirect_to_facebook_authorization(
redirect_uri = redirect_uri or settings.FACEBOOK_APPLICATION_URL + request.get_full_path()
)
+
- return function(request, *args, **kwargs)
+ return function(*args, **kwargs)
return wrapper
return decorator
|
Fix a bug that caused the "facebook_authorization_required" decorator to be incompatible with Django libraries that modify the order of arguments given to views.
|
## Code Before:
from functools import wraps
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.conf import settings
from utils import redirect_to_facebook_authorization
def facebook_authorization_required(redirect_uri=False):
"""
Redirect Facebook canvas views to authorization if required.
Arguments:
redirect_uri -- A string describing an URI to redirect to after authorization is complete.
Defaults to current URI in Facebook canvas (ex. http://apps.facebook.com/myapp/path/).
"""
def decorator(function):
@wraps(function)
def wrapper(request, *args, **kwargs):
if not request.facebook or not request.facebook.user:
return redirect_to_facebook_authorization(
redirect_uri = redirect_uri or settings.FACEBOOK_APPLICATION_URL + request.get_full_path()
)
return function(request, *args, **kwargs)
return wrapper
return decorator
## Instruction:
Fix a bug that caused the "facebook_authorization_required" decorator to be incompatible with Django libraries that modify the order of arguments given to views.
## Code After:
from functools import wraps
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.handlers.wsgi import WSGIRequest
from django.conf import settings
from utils import redirect_to_facebook_authorization
def facebook_authorization_required(redirect_uri=False):
"""
Redirect Facebook canvas views to authorization if required.
Arguments:
redirect_uri -- A string describing an URI to redirect to after authorization is complete.
Defaults to current URI in Facebook canvas (ex. http://apps.facebook.com/myapp/path/).
"""
def decorator(function):
@wraps(function)
def wrapper(*args, **kwargs):
request = [arg for arg in args if arg.__class__ is WSGIRequest][0]
if not request.facebook or not request.facebook.user:
return redirect_to_facebook_authorization(
redirect_uri = redirect_uri or settings.FACEBOOK_APPLICATION_URL + request.get_full_path()
)
return function(*args, **kwargs)
return wrapper
return decorator
|
...
from django.core.urlresolvers import reverse
from django.core.handlers.wsgi import WSGIRequest
from django.conf import settings
...
@wraps(function)
def wrapper(*args, **kwargs):
request = [arg for arg in args if arg.__class__ is WSGIRequest][0]
if not request.facebook or not request.facebook.user:
...
)
return function(*args, **kwargs)
return wrapper
...
|
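
The essence of the fix is locating the request by type rather than by position, so decorators or view wrappers that reorder positional arguments still find it. A framework-free sketch of the technique (the Request class here stands in for WSGIRequest, and isinstance is slightly looser than the exact-class check used above):

from functools import wraps

class Request(object):
    """Stand-in for django's WSGIRequest in this sketch."""
    def __init__(self, user=None):
        self.user = user

def auth_required(view):
    @wraps(view)
    def wrapper(*args, **kwargs):
        # Find the request wherever it sits in the positional args.
        request = next(a for a in args if isinstance(a, Request))
        if request.user is None:
            return 'redirect-to-authorization'
        return view(*args, **kwargs)
    return wrapper

@auth_required
def view(extra_arg, request):
    return 'ok'

print(view('extra', Request()))             # redirect-to-authorization
print(view('extra', Request(user='bob')))   # ok
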
2acb5a2eb7ae0a0f8ea8423a7da5a7a8b9f07151
|
fore/mailer.py
|
fore/mailer.py
|
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is something I need to tell you.'
AlertMessage(a_message)
|
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is something I need to tell you.'
AlertMessage(a_message)
|
Move test message into function.
|
Move test message into function.
|
Python
|
artistic-2.0
|
Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension
|
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
+ def test():
- a_message = 'There is something I need to tell you.'
+ a_message = 'There is something I need to tell you.'
- AlertMessage(a_message)
+ AlertMessage(a_message)
|
Move test message into function.
|
## Code Before:
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
a_message = 'There is something I need to tell you.'
AlertMessage(a_message)
## Instruction:
Move test message into function.
## Code After:
import smtplib
from email.mime.text import MIMEText
import apikeys
def AlertMessage(message, subject='Glitch System Message', me=apikeys.system_email, you=apikeys.admin_email):
msg = MIMEText(message)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = you
# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
def test():
a_message = 'There is something I need to tell you.'
AlertMessage(a_message)
|
# ... existing code ...
def test():
a_message = 'There is something I need to tell you.'
AlertMessage(a_message)
# ... rest of the code ...
|
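
Wrapping the ad-hoc send in test() stops it from firing on every import; the conventional next step is a __main__ guard so the module still doubles as a quick manual check. A small sketch extending the module above:

if __name__ == '__main__':
    # Send the test message only when the module is run directly,
    # never as a side effect of `import mailer`.
    test()
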
442f21bfde16f72d4480fa7fd9dea2eac741a857
|
src/analyses/views.py
|
src/analyses/views.py
|
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.utils.translation import ugettext_lazy as _
from django.views.generic import CreateView, TemplateView
from .forms import AbstractAnalysisCreateForm
from .pipelines import AVAILABLE_PIPELINES
User = get_user_model()
class SelectNewAnalysisTypeView(LoginRequiredMixin, TemplateView):
template_name = "analyses/new_analysis_by_type.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['available_pipelines'] = AVAILABLE_PIPELINES
return context
class AbstractAnalysisFormView(LoginRequiredMixin, CreateView):
form_class = AbstractAnalysisCreateForm
template_name = None
analysis_type = 'AbstractAnalysis'
analysis_description = ''
analysis_create_url = None
def get_form_kwargs(self):
"""Pass request object for form creation"""
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
response = super().form_valid(form)
messages.add_message(
self.request, messages.INFO,
_('You just created a %(analysis_type)s analysis!') % {
'analysis_type': self.analysis_type
}
)
return response
|
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.utils.translation import ugettext_lazy as _
from django.views.generic import CreateView, TemplateView
from .forms import AbstractAnalysisCreateForm
from .pipelines import AVAILABLE_PIPELINES
User = get_user_model()
class SelectNewAnalysisTypeView(LoginRequiredMixin, TemplateView):
template_name = "analyses/new_analysis_by_type.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['available_pipelines'] = AVAILABLE_PIPELINES
return context
class AbstractAnalysisFormView(LoginRequiredMixin, CreateView):
form_class = AbstractAnalysisCreateForm
template_name = None
analysis_type = 'AbstractAnalysis'
analysis_description = ''
analysis_create_url = None
def get_form_kwargs(self):
"""Pass request object for form creation"""
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
response = super().form_valid(form)
messages.add_message(
self.request, messages.INFO,
_(
'You just created a %(analysis_type)s analysis! '
'View its detail <a href="%(analysis_detail_url)s">here</a>.'
) % {
'analysis_type': self.analysis_type,
'analysis_detail_url': self.object.get_absolute_url(),
},
extra_tags='safe',
)
return response
|
Include analysis detail view URL in message
|
Include analysis detail view URL in message
|
Python
|
mit
|
ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai
|
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.utils.translation import ugettext_lazy as _
from django.views.generic import CreateView, TemplateView
from .forms import AbstractAnalysisCreateForm
from .pipelines import AVAILABLE_PIPELINES
User = get_user_model()
class SelectNewAnalysisTypeView(LoginRequiredMixin, TemplateView):
template_name = "analyses/new_analysis_by_type.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['available_pipelines'] = AVAILABLE_PIPELINES
return context
class AbstractAnalysisFormView(LoginRequiredMixin, CreateView):
form_class = AbstractAnalysisCreateForm
template_name = None
analysis_type = 'AbstractAnalysis'
analysis_description = ''
analysis_create_url = None
def get_form_kwargs(self):
"""Pass request object for form creation"""
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
response = super().form_valid(form)
messages.add_message(
self.request, messages.INFO,
+ _(
- _('You just created a %(analysis_type)s analysis!') % {
+ 'You just created a %(analysis_type)s analysis! '
+ 'View its detail <a href="%(analysis_detail_url)s">here</a>.'
+ ) % {
- 'analysis_type': self.analysis_type
+ 'analysis_type': self.analysis_type,
+ 'analysis_detail_url': self.object.get_absolute_url(),
- }
+ },
+ extra_tags='safe',
)
return response
|
Include analysis detail view URL in message
|
## Code Before:
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.utils.translation import ugettext_lazy as _
from django.views.generic import CreateView, TemplateView
from .forms import AbstractAnalysisCreateForm
from .pipelines import AVAILABLE_PIPELINES
User = get_user_model()
class SelectNewAnalysisTypeView(LoginRequiredMixin, TemplateView):
template_name = "analyses/new_analysis_by_type.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['available_pipelines'] = AVAILABLE_PIPELINES
return context
class AbstractAnalysisFormView(LoginRequiredMixin, CreateView):
form_class = AbstractAnalysisCreateForm
template_name = None
analysis_type = 'AbstractAnalysis'
analysis_description = ''
analysis_create_url = None
def get_form_kwargs(self):
"""Pass request object for form creation"""
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
response = super().form_valid(form)
messages.add_message(
self.request, messages.INFO,
_('You just created a %(analysis_type)s analysis!') % {
'analysis_type': self.analysis_type
}
)
return response
## Instruction:
Include analysis detail view URL in message
## Code After:
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.utils.translation import ugettext_lazy as _
from django.views.generic import CreateView, TemplateView
from .forms import AbstractAnalysisCreateForm
from .pipelines import AVAILABLE_PIPELINES
User = get_user_model()
class SelectNewAnalysisTypeView(LoginRequiredMixin, TemplateView):
template_name = "analyses/new_analysis_by_type.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['available_pipelines'] = AVAILABLE_PIPELINES
return context
class AbstractAnalysisFormView(LoginRequiredMixin, CreateView):
form_class = AbstractAnalysisCreateForm
template_name = None
analysis_type = 'AbstractAnalysis'
analysis_description = ''
analysis_create_url = None
def get_form_kwargs(self):
"""Pass request object for form creation"""
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
response = super().form_valid(form)
messages.add_message(
self.request, messages.INFO,
_(
'You just created a %(analysis_type)s analysis! '
'View its detail <a href="%(analysis_detail_url)s">here</a>.'
) % {
'analysis_type': self.analysis_type,
'analysis_detail_url': self.object.get_absolute_url(),
},
extra_tags='safe',
)
return response
|
// ... existing code ...
self.request, messages.INFO,
_(
'You just created a %(analysis_type)s analysis! '
'View its detail <a href="%(analysis_detail_url)s">here</a>.'
) % {
'analysis_type': self.analysis_type,
'analysis_detail_url': self.object.get_absolute_url(),
},
extra_tags='safe',
)
// ... rest of the code ...
|
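
The extra_tags='safe' flag travels with the message to the rendering layer, where a loop can decide which messages carry trusted markup (like the detail link here) and may skip HTML escaping. A self-contained sketch of that consumer side, with a tiny stand-in for Django's Message class:

import html

class Msg(object):
    """Stand-in for django.contrib.messages' Message in this sketch."""
    def __init__(self, text, extra_tags=''):
        self.text = text
        self.extra_tags = extra_tags

def render(messages):
    out = []
    for m in messages:
        # Only messages explicitly tagged 'safe' keep raw markup;
        # everything else is escaped before rendering.
        safe = 'safe' in m.extra_tags.split()
        out.append('<li>{}</li>'.format(m.text if safe else html.escape(m.text)))
    return '\n'.join(out)

print(render([Msg('View its detail <a href="/analyses/1/">here</a>.',
                  extra_tags='safe'),
              Msg('<script>alert(1)</script>')]))
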
237b66c8b9cef714b64a75b1f20a79a4357c71b5
|
apps/continiousauth/serializers.py
|
apps/continiousauth/serializers.py
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
Change serializer to omit dates
|
Change serializer to omit dates
|
Python
|
mit
|
larserikgk/mobiauth-server,larserikgk/mobiauth-server,larserikgk/mobiauth-server
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
- fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
+ fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
Change serializer to omit dates
|
## Code Before:
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
## Instruction:
Change serializer to omit dates
## Code After:
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
# ... existing code ...
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
# ... rest of the code ...
|
1ea27e8989657bb35dd37b6ee2e038e1358fbc96
|
social_core/backends/globus.py
|
social_core/backends/globus.py
|
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
Set a JWT signature algorithm for the Globus backend to RS512
|
Set a JWT signature algorithm for the Globus backend to RS512
|
Python
|
bsd-3-clause
|
python-social-auth/social-core,python-social-auth/social-core
|
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
+ JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
Set a JWT signature algorithm for the Globus backend to RS512
|
## Code Before:
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
## Instruction:
Set a JWT signature algorithm for the Globus backend to RS512
## Code After:
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
// ... existing code ...
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
// ... rest of the code ...
|
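
Pinning JWT_ALGORITHMS matters because a verifier should never trust the alg field inside the token itself; the explicit allow-list is what rejects downgraded or forged headers. A sketch of the equivalent check with PyJWT (an assumed dependency for this illustration, not an import shown in the backend):

import jwt  # PyJWT, assumed available for this illustration

ALLOWED_ALGS = ['RS256', 'RS512']

def decode_id_token(id_token, public_key, audience):
    # An explicit algorithms allow-list means a token whose header
    # claims e.g. alg=none or HS256 is rejected during verification.
    return jwt.decode(id_token, public_key,
                      algorithms=ALLOWED_ALGS, audience=audience)
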
081b1cd60e363adf941ba511c7683c0ed2843a97
|
gubernator/appengine_config.py
|
gubernator/appengine_config.py
|
import os
from google.appengine.ext import vendor
# Add any libraries installed in the "third_party" folder.
vendor.add('third_party')
# Use remote GCS calls for local development.
if os.environ.get('SERVER_SOFTWARE','').startswith('Development'):
os.environ['SERVER_SOFTWARE'] += ' remote_api'
|
import os
from google.appengine.ext import vendor
from google.appengine.api import urlfetch
# Add any libraries installed in the "third_party" folder.
vendor.add('third_party')
# Use remote GCS calls for local development.
if os.environ.get('SERVER_SOFTWARE','').startswith('Development'):
os.environ['SERVER_SOFTWARE'] += ' remote_api'
# The default deadline of 5 seconds is too aggressive of a target for GCS
# directory listing operations.
urlfetch.set_default_fetch_deadline(60)
|
Increase Gubernator's url fetch deadline -- 5 seconds is too fast!
|
Increase Gubernator's url fetch deadline -- 5 seconds is too fast!
This should avoid a lot of refreshes because some directory listing or
download takes slightly too long.
|
Python
|
apache-2.0
|
mikedanese/test-infra,gmarek/test-infra,mindprince/test-infra,shashidharatd/test-infra,cblecker/test-infra,rmmh/kubernetes-test-infra,mikedanese/test-infra,jlowdermilk/test-infra,shyamjvs/test-infra,mikedanese/test-infra,shyamjvs/test-infra,girishkalele/test-infra,grodrigues3/test-infra,monopole/test-infra,foxish/test-infra,mikedanese/test-infra,mtaufen/test-infra,shashidharatd/test-infra,madhusudancs/test-infra,kargakis/test-infra,mtaufen/test-infra,shyamjvs/test-infra,gmarek/test-infra,michelle192837/test-infra,mindprince/test-infra,shashidharatd/test-infra,dchen1107/test-infra,madhusudancs/test-infra,girishkalele/test-infra,grodrigues3/test-infra,monopole/test-infra,piosz/test-infra,piosz/test-infra,kewu1992/test-infra,dims/test-infra,kubernetes/test-infra,ixdy/kubernetes-test-infra,brahmaroutu/test-infra,kewu1992/test-infra,dims/test-infra,kubernetes/test-infra,cblecker/test-infra,jessfraz/test-infra,krzyzacy/test-infra,cjwagner/test-infra,dchen1107/test-infra,kubernetes/test-infra,cblecker/test-infra,dchen1107/test-infra,dchen1107/test-infra,kubernetes/test-infra,shyamjvs/test-infra,brahmaroutu/test-infra,cblecker/test-infra,jessfraz/test-infra,grodrigues3/test-infra,shyamjvs/test-infra,gmarek/test-infra,gmarek/test-infra,michelle192837/test-infra,shyamjvs/test-infra,shyamjvs/test-infra,brahmaroutu/test-infra,cblecker/test-infra,dims/test-infra,cjwagner/test-infra,krzyzacy/test-infra,kewu1992/test-infra,kargakis/test-infra,pwittrock/test-infra,fejta/test-infra,abgworrall/test-infra,monopole/test-infra,piosz/test-infra,kewu1992/test-infra,girishkalele/test-infra,dims/test-infra,gmarek/test-infra,michelle192837/test-infra,spxtr/test-infra,piosz/test-infra,lavalamp/test-infra,abgworrall/test-infra,nlandolfi/test-infra-1,brahmaroutu/test-infra,kargakis/test-infra,jessfraz/test-infra,grodrigues3/test-infra,BenTheElder/test-infra,lavalamp/test-infra,piosz/test-infra,lavalamp/test-infra,abgworrall/test-infra,shashidharatd/test-infra,brahmaroutu/test-infra,cblecker/test-infra,dims/test-infra,cjwagner/test-infra,krzyzacy/test-infra,kewu1992/test-infra,kargakis/test-infra,pwittrock/test-infra,fejta/test-infra,spxtr/test-infra,piosz/test-infra,lavalamp/test-infra,pwittrock/test-infra,brahmaroutu/test-infra,nlandolfi/test-infra-1,BenTheElder/test-infra,jessfraz/test-infra,pwittrock/test-infra,krousey/test-infra,gmarek/test-infra,krzyzacy/test-infra,jlowdermilk/test-infra,kubernetes/test-infra,ixdy/kubernetes-test-infra,monopole/test-infra,foxish/test-infra,monopole/test-infra,michelle192837/test-infra,mindprince/test-infra,kewu1992/test-infra,krousey/test-infra,krzyzacy/test-infra,jlowdermilk/test-infra,kubernetes/test-infra,cjwagner/test-infra,girishkalele/test-infra,abgworrall/test-infra,maisem/test-infra,maisem/test-infra,lavalamp/test-infra,grodrigues3/test-infra,shashidharatd/test-infra,maisem/test-infra,mindprince/test-infra,jessfraz/test-infra,mikedanese/test-infra,BenTheElder/test-infra,krousey/test-infra,cjwagner/test-infra,madhusudancs/test-infra,foxish/test-infra,kubernetes/test-infra,kubernetes/test-infra,rmmh/kubernetes-test-infra,cblecker/test-infra,kargakis/test-infra,jlowdermilk/test-infra,lavalamp/test-infra,krzyzacy/test-infra,lavalamp/test-infra,foxish/test-infra,ixdy/kubernetes-test-infra,nlandolfi/test-infra-1,BenTheElder/test-infra,madhusudancs/test-infra,foxish/test-infra,mtaufen/test-infra,spxtr/test-infra,michelle192837/test-infra,fejta/test-infra,fejta/test-infra,mtaufen/test-infra,krzyzacy/test-infra,krousey/test-infra,kargakis/test-infra,dchen1107/test-infra,cjwagner/test-infra,jlowdermilk/test-infra,ixdy/kubernetes-test-infra
|
import os
from google.appengine.ext import vendor
+ from google.appengine.api import urlfetch
# Add any libraries installed in the "third_party" folder.
vendor.add('third_party')
# Use remote GCS calls for local development.
if os.environ.get('SERVER_SOFTWARE','').startswith('Development'):
os.environ['SERVER_SOFTWARE'] += ' remote_api'
+ # The default deadline of 5 seconds is too aggressive of a target for GCS
+ # directory listing operations.
+ urlfetch.set_default_fetch_deadline(60)
+
|
Increase Gubernator's url fetch deadline -- 5 seconds is too fast!
|
## Code Before:
import os
from google.appengine.ext import vendor
# Add any libraries installed in the "third_party" folder.
vendor.add('third_party')
# Use remote GCS calls for local development.
if os.environ.get('SERVER_SOFTWARE','').startswith('Development'):
os.environ['SERVER_SOFTWARE'] += ' remote_api'
## Instruction:
Increase Gubernator's url fetch deadline -- 5 seconds is too fast!
## Code After:
import os
from google.appengine.ext import vendor
from google.appengine.api import urlfetch
# Add any libraries installed in the "third_party" folder.
vendor.add('third_party')
# Use remote GCS calls for local development.
if os.environ.get('SERVER_SOFTWARE','').startswith('Development'):
os.environ['SERVER_SOFTWARE'] += ' remote_api'
# The default deadline of 5 seconds is too aggressive of a target for GCS
# directory listing operations.
urlfetch.set_default_fetch_deadline(60)
|
# ... existing code ...
from google.appengine.ext import vendor
from google.appengine.api import urlfetch
# ... modified code ...
os.environ['SERVER_SOFTWARE'] += ' remote_api'
# The default deadline of 5 seconds is too aggressive of a target for GCS
# directory listing operations.
urlfetch.set_default_fetch_deadline(60)
# ... rest of the code ...
|
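
set_default_fetch_deadline changes the process-wide default inherited by every urlfetch-backed client, GCS access included; a single known-slow call can still override it per request. A sketch of both levels (the bucket URL is illustrative):

from google.appengine.api import urlfetch

# Raise the process-wide default from 5s so GCS directory listings
# stop timing out.
urlfetch.set_default_fetch_deadline(60)

# An individual call can still opt into a longer (or shorter) deadline.
result = urlfetch.fetch(
    'https://storage.googleapis.com/some-bucket?prefix=logs/',
    deadline=120)
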
6932164f20ced80ff6d08402b84aba954a983e2d
|
iota/commands/extended/get_transaction_objects.py
|
iota/commands/extended/get_transaction_objects.py
|
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
from iota.filters import Trytes
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
See :py:meth:`iota.api.StrictIota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
.get('hashes') # type: Optional[Iterable[TransactionHash]]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
f.Required | f.Array | f.FilterRepeater(
f.Required |
Trytes(TransactionHash) |
f.Unicode(encoding='ascii', normalize=False),
),
})
|
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
from iota.filters import StringifiedTrytesArray
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
See :py:meth:`iota.api.Iota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
.get('hashes') # type: Iterable[TransactionHash]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
StringifiedTrytesArray(TransactionHash) | f.Required
})
|
Use filter macro for request validation
|
Use filter macro for request validation
StringifiedTrytesArray(Type) filter macro was
introduced in #243. Because of this, no request
filter test case is needed; the macro is
already covered by other test cases.
|
Python
|
mit
|
iotaledger/iota.lib.py
|
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
- from iota.filters import Trytes
+ from iota.filters import StringifiedTrytesArray
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
- See :py:meth:`iota.api.StrictIota.get_transaction_objects`.
+ See :py:meth:`iota.api.Iota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
- .get('hashes') # type: Optional[Iterable[TransactionHash]]
+ .get('hashes') # type: Iterable[TransactionHash]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
+ StringifiedTrytesArray(TransactionHash) | f.Required
- f.Required | f.Array | f.FilterRepeater(
- f.Required |
- Trytes(TransactionHash) |
- f.Unicode(encoding='ascii', normalize=False),
- ),
})
|
Use filter macro for request validation
|
## Code Before:
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
from iota.filters import Trytes
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
See :py:meth:`iota.api.StrictIota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
.get('hashes') # type: Optional[Iterable[TransactionHash]]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
f.Required | f.Array | f.FilterRepeater(
f.Required |
Trytes(TransactionHash) |
f.Unicode(encoding='ascii', normalize=False),
),
})
## Instruction:
Use filter macro for request validation
## Code After:
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
from iota.filters import StringifiedTrytesArray
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
See :py:meth:`iota.api.Iota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
.get('hashes') # type: Iterable[TransactionHash]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
StringifiedTrytesArray(TransactionHash) | f.Required
})
|
# ... existing code ...
from iota.commands import FilterCommand, RequestFilter
from iota.filters import StringifiedTrytesArray
# ... modified code ...
See :py:meth:`iota.api.Iota.get_transaction_objects`.
"""
...
hashes = request\
.get('hashes') # type: Iterable[TransactionHash]
...
'hashes':
StringifiedTrytesArray(TransactionHash) | f.Required
})
# ... rest of the code ...
|
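
The macro collapses the repeated f.Array | f.FilterRepeater(...) chain into one reusable name that still composes with | like any filter. A plain-function sketch of the same effect (pyota's real StringifiedTrytesArray is built with the filters library's macro support; this is only an approximation of its behavior):

import filters as f

from iota import TransactionHash
from iota.filters import Trytes

def trytes_array(trytes_type):
    """One reusable name for the array-of-trytes chain this commit
    replaces: validate each element and coerce it to a unicode string."""
    return f.Array | f.FilterRepeater(
        f.Required |
        Trytes(trytes_type) |
        f.Unicode(encoding='ascii', normalize=False),
    )

# Usable like the original inline chain:
hashes_filter = trytes_array(TransactionHash) | f.Required
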
679f20fc8747020f08f1e18a47772b18d886d29f
|
circuit/_twisted.py
|
circuit/_twisted.py
|
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
Remove print statements from TwistedCircuitBreaker
|
Remove print statements from TwistedCircuitBreaker
|
Python
|
apache-2.0
|
edgeware/python-circuit
|
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
- print "EXIT"
if exc_type is defer._DefGen_Return:
- print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
Remove print statements from TwistedCircuitBreaker
|
## Code Before:
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
## Instruction:
Remove print statements from TwistedCircuitBreaker
## Code After:
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
# ... existing code ...
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
# ... rest of the code ...
|
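Note: the record above relies on the `__exit__` contract — clearing the exception tuple before delegating (or returning a truthy value) suppresses the in-flight exception. A self-contained sketch of that mechanism, independent of the circuit library:
class SwallowSentinel(object):
    """Context manager that suppresses exactly one exception type."""
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, tb):
        # Returning True tells the interpreter the exception was handled.
        return exc_type is ZeroDivisionError
with SwallowSentinel():
    1 / 0  # suppressed, execution continues after the with block
print('still running')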
eaae2a1e88572e224621e242be1d15e92065f15e
|
mopidy_nad/__init__.py
|
mopidy_nad/__init__.py
|
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def register_gstreamer_elements(self):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
|
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def setup(self, registry):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
|
Use new extension setup() API
|
Use new extension setup() API
|
Python
|
apache-2.0
|
ZenithDK/mopidy-primare,mopidy/mopidy-nad
|
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
- def register_gstreamer_elements(self):
+ def setup(self, registry):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
|
Use new extension setup() API
|
## Code Before:
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def register_gstreamer_elements(self):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
## Instruction:
Use new extension setup() API
## Code After:
from __future__ import unicode_literals
import os
import pygst
pygst.require('0.10')
import gst
import gobject
from mopidy import config, ext
__version__ = '1.0.0'
class Extension(ext.Extension):
dist_name = 'Mopidy-NAD'
ext_name = 'nad'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def setup(self, registry):
from .mixer import NadMixer
gobject.type_register(NadMixer)
gst.element_register(NadMixer, 'nadmixer', gst.RANK_MARGINAL)
|
...
def setup(self, registry):
from .mixer import NadMixer
...
|
f7f6a8a1b1f019b45b9f3c3c9c6124469a335798
|
phildb_client/__init__.py
|
phildb_client/__init__.py
|
from client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
from phildb_client.client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
Make import of client module explicit
|
Make import of client module explicit
|
Python
|
bsd-3-clause
|
amacd31/phildb_client
|
- from client import PhilDBClient
+ from phildb_client.client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
Make import of client module explicit
|
## Code Before:
from client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Instruction:
Make import of client module explicit
## Code After:
from phildb_client.client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
...
from phildb_client.client import PhilDBClient
...
|
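Note: for context on why the explicit form matters — Python 3 dropped implicit relative imports, so inside a package `from client import ...` resolves against sys.path rather than the package itself. A throwaway-package demonstration (the `mypkg` name is made up):
import importlib
import sys
import tempfile
from pathlib import Path
root = Path(tempfile.mkdtemp())
pkg = root / 'mypkg'
pkg.mkdir()
(pkg / 'client.py').write_text("NAME = 'PhilDBClient'\n")
# Absolute import, as in the record; 'from .client import NAME' would also work.
(pkg / '__init__.py').write_text("from mypkg.client import NAME\n")
sys.path.insert(0, str(root))
assert importlib.import_module('mypkg').NAME == 'PhilDBClient'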
39a743463f55c3cfbbea05b4d471d01f66dd93f8
|
permamodel/tests/test_perma_base.py
|
permamodel/tests/test_perma_base.py
|
from permamodel.components import frost_number
import os
import numpy as np
from .. import permamodel_directory, data_directory, examples_directory
def test_directory_names_are_set():
assert(permamodel_directory is not None)
|
import os
from nose.tools import assert_true
from .. import (permamodel_directory, data_directory,
examples_directory, tests_directory)
def test_permamodel_directory_is_set():
assert(permamodel_directory is not None)
def test_data_directory_is_set():
assert(data_directory is not None)
def test_examples_directory_is_set():
assert(examples_directory is not None)
def test_tests_directory_is_set():
assert(tests_directory is not None)
def test_permamodel_directory_exists():
assert_true(os.path.isdir(permamodel_directory))
def test_data_directory_exists():
assert_true(os.path.isdir(data_directory))
def test_examples_directory_exists():
assert_true(os.path.isdir(examples_directory))
def test_tests_directory_exists():
assert_true(os.path.isdir(tests_directory))
|
Add unit tests for all package directories
|
Add unit tests for all package directories
|
Python
|
mit
|
permamodel/permamodel,permamodel/permamodel
|
- from permamodel.components import frost_number
import os
- import numpy as np
+ from nose.tools import assert_true
- from .. import permamodel_directory, data_directory, examples_directory
+ from .. import (permamodel_directory, data_directory,
+ examples_directory, tests_directory)
- def test_directory_names_are_set():
+
+ def test_permamodel_directory_is_set():
assert(permamodel_directory is not None)
+ def test_data_directory_is_set():
+ assert(data_directory is not None)
+
+
+ def test_examples_directory_is_set():
+ assert(examples_directory is not None)
+
+
+ def test_tests_directory_is_set():
+ assert(tests_directory is not None)
+
+
+ def test_permamodel_directory_exists():
+ assert_true(os.path.isdir(permamodel_directory))
+
+
+ def test_data_directory_exists():
+ assert_true(os.path.isdir(data_directory))
+
+
+ def test_examples_directory_exists():
+ assert_true(os.path.isdir(examples_directory))
+
+
+ def test_tests_directory_exists():
+ assert_true(os.path.isdir(tests_directory))
+
|
Add unit tests for all package directories
|
## Code Before:
from permamodel.components import frost_number
import os
import numpy as np
from .. import permamodel_directory, data_directory, examples_directory
def test_directory_names_are_set():
assert(permamodel_directory is not None)
## Instruction:
Add unit tests for all package directories
## Code After:
import os
from nose.tools import assert_true
from .. import (permamodel_directory, data_directory,
examples_directory, tests_directory)
def test_permamodel_directory_is_set():
assert(permamodel_directory is not None)
def test_data_directory_is_set():
assert(data_directory is not None)
def test_examples_directory_is_set():
assert(examples_directory is not None)
def test_tests_directory_is_set():
assert(tests_directory is not None)
def test_permamodel_directory_exists():
assert_true(os.path.isdir(permamodel_directory))
def test_data_directory_exists():
assert_true(os.path.isdir(data_directory))
def test_examples_directory_exists():
assert_true(os.path.isdir(examples_directory))
def test_tests_directory_exists():
assert_true(os.path.isdir(tests_directory))
|
# ... existing code ...
import os
from nose.tools import assert_true
from .. import (permamodel_directory, data_directory,
examples_directory, tests_directory)
def test_permamodel_directory_is_set():
assert(permamodel_directory is not None)
# ... modified code ...
def test_data_directory_is_set():
assert(data_directory is not None)
def test_examples_directory_is_set():
assert(examples_directory is not None)
def test_tests_directory_is_set():
assert(tests_directory is not None)
def test_permamodel_directory_exists():
assert_true(os.path.isdir(permamodel_directory))
def test_data_directory_exists():
assert_true(os.path.isdir(data_directory))
def test_examples_directory_exists():
assert_true(os.path.isdir(examples_directory))
def test_tests_directory_exists():
assert_true(os.path.isdir(tests_directory))
# ... rest of the code ...
|
a692c339983ae0252577635751b67324985275dc
|
background_hang_reporter_job/tracked.py
|
background_hang_reporter_job/tracked.py
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
for frame, lib in stack)
class ActivityStreamHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "activity-stream/" in frame
for frame, lib in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs, ActivityStreamHangs]
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
for frame, lib in stack)
class ActivityStreamHangs(object):
title = "Activity Stream Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "activity-stream/" in frame
for frame, lib in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs, ActivityStreamHangs]
|
Fix Activity Stream category title
|
Fix Activity Stream category title
|
Python
|
mit
|
squarewave/background-hang-reporter-job,squarewave/background-hang-reporter-job
|
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
for frame, lib in stack)
class ActivityStreamHangs(object):
- title = "Devtools Hangs"
+ title = "Activity Stream Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "activity-stream/" in frame
for frame, lib in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs, ActivityStreamHangs]
|
Fix Activity Stream category title
|
## Code Before:
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
for frame, lib in stack)
class ActivityStreamHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "activity-stream/" in frame
for frame, lib in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs, ActivityStreamHangs]
## Instruction:
Fix Activity Stream category title
## Code After:
class AllHangs(object):
title = "All Hangs"
@staticmethod
def matches_hang(_):
return True
class DevtoolsHangs(object):
title = "Devtools Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "devtools/" in frame
for frame, lib in stack)
class ActivityStreamHangs(object):
title = "Activity Stream Hangs"
@staticmethod
def matches_hang(hang):
#pylint: disable=unused-variable
stack, duration, thread, runnable, process, annotations, build_date, platform = hang
return stack is not None and any(isinstance(frame, basestring) and "activity-stream/" in frame
for frame, lib in stack)
def get_tracked_stats():
return [AllHangs, DevtoolsHangs, ActivityStreamHangs]
|
# ... existing code ...
class ActivityStreamHangs(object):
title = "Activity Stream Hangs"
# ... rest of the code ...
|
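Note: the two categories in the record above differ only in the substring they look for, so the predicate is easy to check in isolation. A Python 3 rewrite of the frame test (`str` instead of `basestring`), with an invented sample stack:
def stack_mentions(stack, needle):
    return stack is not None and any(
        isinstance(frame, str) and needle in frame
        for frame, lib in stack
    )
sample_stack = [
    ('resource://activity-stream/lib/Feed.jsm', 'xul'),
    (0x1f3a, 'libc'),  # pseudo-frame without a readable name
]
assert stack_mentions(sample_stack, 'activity-stream/')
assert not stack_mentions(sample_stack, 'devtools/')
assert not stack_mentions(None, 'devtools/')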
d2da0b71c36f32305ef55e2cbbf2041eb7b06cf6
|
Project/tools/lib.py
|
Project/tools/lib.py
|
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
Use modification, not creation times to determine relative newness.
|
Use modification, not creation times to determine relative newness.
|
Python
|
mit
|
holdenweb/nbtools,holdenweb/nbtools
|
import os
def newer(file1, file2):
- file1_creation = os.stat(file1).st_mtime
+ file1_modification = os.stat(file1).st_mtime
- file2_creation = os.stat(file2).st_mtime
+ file2_modification = os.stat(file2).st_mtime
- return file1_creation > file2_creation
+ return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
Use modification, not creation times to determine relative newness.
|
## Code Before:
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
## Instruction:
Use modification, not creation times to determine relative newness.
## Code After:
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
# ... existing code ...
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
# ... rest of the code ...
|
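Note: `st_mtime` really is the last-modification time, and a portable creation time is generally unavailable, which is what the rename above reflects. A deterministic check using explicitly set timestamps (so filesystem mtime resolution cannot skew the result):
import os
import tempfile
def newer(file1, file2):
    # Compare last-modification times, not (non-portable) creation times.
    return os.stat(file1).st_mtime > os.stat(file2).st_mtime
older_file = tempfile.NamedTemporaryFile(delete=False).name
newer_file = tempfile.NamedTemporaryFile(delete=False).name
os.utime(older_file, (0, 100))  # (atime, mtime)
os.utime(newer_file, (0, 200))
assert newer(newer_file, older_file)
assert not newer(older_file, newer_file)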
a013927ee9772e05ae4255cff98ecfe4819f205c
|
flask_app/__init__.py
|
flask_app/__init__.py
|
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
|
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
|
Set login view for login_required
|
Set login view for login_required
|
Python
|
mit
|
szeestraten/kidsakoder-minecraft,szeestraten/kidsakoder-minecraft,szeestraten/kidsakoder-minecraft,szeestraten/kidsakoder-minecraft
|
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
+ login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
|
Set login view for login_required
|
## Code Before:
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
## Instruction:
Set login view for login_required
## Code After:
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
|
...
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
...
|
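Note: what `login_view` buys — once it is set, `@login_required` redirects anonymous requests to that endpoint instead of failing with 401. A minimal sketch assuming a current Flask-Login install (modern spelling `flask_login`, not the deprecated `flask.ext.login` used in the record):
from flask import Flask
from flask_login import LoginManager, login_required
app = Flask(__name__)
app.secret_key = 'dev'  # needed for the "please log in" flash message
login_manager = LoginManager(app)
login_manager.login_view = 'login'
@app.route('/login')
def login():
    return 'please log in'
@app.route('/secret')
@login_required
def secret():
    return 'secret'
# Anonymous client is redirected (302) to /login rather than rejected with 401.
response = app.test_client().get('/secret')
assert response.status_code == 302
assert '/login' in response.headers['Location']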
569840b37f9e43bec0de3f6ddadc89d6a2f9e17b
|
traceview/formatters.py
|
traceview/formatters.py
|
from collections import namedtuple
def identity(results):
return results
def tuplify(results, class_name='Result'):
if 'fields' in results and 'items' in results:
return _tuplify_timeseries(results, class_name)
return _tuplify_dict(results, class_name)
def _tuplify_timeseries(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results['fields'])
return map(nt._make, results['items'])
def _tuplify_dict(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results.keys())
return nt(**results)
|
from collections import namedtuple
def identity(results):
return results
def tuplify(results, class_name='Result'):
if 'fields' in results and 'items' in results:
return _tuplify_timeseries(results, class_name)
return _tuplify_dict(results, class_name)
def _tuplify_timeseries(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results['fields'])
return [nt(*item) for item in results['items']]
def _tuplify_dict(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results.keys())
return nt(**results)
|
Replace map with a list comprehension.
|
Replace map with a list comprehension.
|
Python
|
mit
|
danriti/python-traceview
|
from collections import namedtuple
def identity(results):
return results
def tuplify(results, class_name='Result'):
if 'fields' in results and 'items' in results:
return _tuplify_timeseries(results, class_name)
return _tuplify_dict(results, class_name)
def _tuplify_timeseries(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results['fields'])
- return map(nt._make, results['items'])
+ return [nt(*item) for item in results['items']]
def _tuplify_dict(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results.keys())
return nt(**results)
|
Replace map with a list comprehension.
|
## Code Before:
from collections import namedtuple
def identity(results):
return results
def tuplify(results, class_name='Result'):
if 'fields' in results and 'items' in results:
return _tuplify_timeseries(results, class_name)
return _tuplify_dict(results, class_name)
def _tuplify_timeseries(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results['fields'])
return map(nt._make, results['items'])
def _tuplify_dict(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results.keys())
return nt(**results)
## Instruction:
Replace map with a list comprehension.
## Code After:
from collections import namedtuple
def identity(results):
return results
def tuplify(results, class_name='Result'):
if 'fields' in results and 'items' in results:
return _tuplify_timeseries(results, class_name)
return _tuplify_dict(results, class_name)
def _tuplify_timeseries(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results['fields'])
return [nt(*item) for item in results['items']]
def _tuplify_dict(results, class_name):
tuple_name = '{name}Tuple'.format(name=class_name)
nt = namedtuple(tuple_name, results.keys())
return nt(**results)
|
// ... existing code ...
nt = namedtuple(tuple_name, results['fields'])
return [nt(*item) for item in results['items']]
// ... rest of the code ...
|
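Note: the two spellings in the record above are equivalent element-wise; the practical difference is that `map()` returns a lazy iterator on Python 3, while the comprehension is an eager list on both 2 and 3. A quick check:
from collections import namedtuple
Point = namedtuple('Point', ['x', 'y'])
items = [(1, 2), (3, 4)]
eager = [Point(*item) for item in items]  # always a list
lazy = map(Point._make, items)            # iterator on Python 3
assert eager == list(lazy) == [Point(1, 2), Point(3, 4)]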
30b6d886670b7ba65aee9b130ec50d577c778649
|
run_server.py
|
run_server.py
|
import subprocess
import sys
def main():
ip = '127.0.0.1'
port = 5000
workers_count = 4
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
if ':' in arg:
ip, port = arg.split(':')
port = int(port)
break
if '.' in arg:
ip = arg
if arg.isdigit():
port = int(arg)
subprocess.run('gunicorn -w {workers_count} -b {ip}:{port} flucalc.server:app'.format(
workers_count=workers_count, ip=ip, port=port
), shell=True)
if __name__ == '__main__':
main()
|
import subprocess
import sys
def main():
ip = '127.0.0.1'
port = 5000
workers_count = 4
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
if ':' in arg:
ip, port = arg.split(':')
port = int(port)
break
if '.' in arg:
ip = arg
if arg.isdigit():
port = int(arg)
print('FluCalc started on {ip}:{port}'.format(ip=ip, port=port))
subprocess.run('gunicorn -w {workers_count} -b {ip}:{port} flucalc.server:app'.format(
workers_count=workers_count, ip=ip, port=port
), shell=True)
if __name__ == '__main__':
main()
|
Add a message with a socket on server start
|
Add a message with a socket on server start
|
Python
|
mit
|
bondarevts/flucalc,bondarevts/flucalc,bondarevts/flucalc
|
import subprocess
import sys
def main():
ip = '127.0.0.1'
port = 5000
workers_count = 4
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
if ':' in arg:
ip, port = arg.split(':')
port = int(port)
break
if '.' in arg:
ip = arg
if arg.isdigit():
port = int(arg)
+ print('FluCalc started on {ip}:{port}'.format(ip=ip, port=port))
subprocess.run('gunicorn -w {workers_count} -b {ip}:{port} flucalc.server:app'.format(
workers_count=workers_count, ip=ip, port=port
), shell=True)
if __name__ == '__main__':
main()
|
Add a message with a socket on server start
|
## Code Before:
import subprocess
import sys
def main():
ip = '127.0.0.1'
port = 5000
workers_count = 4
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
if ':' in arg:
ip, port = arg.split(':')
port = int(port)
break
if '.' in arg:
ip = arg
if arg.isdigit():
port = int(arg)
subprocess.run('gunicorn -w {workers_count} -b {ip}:{port} flucalc.server:app'.format(
workers_count=workers_count, ip=ip, port=port
), shell=True)
if __name__ == '__main__':
main()
## Instruction:
Add a message with a socket on server start
## Code After:
import subprocess
import sys
def main():
ip = '127.0.0.1'
port = 5000
workers_count = 4
if len(sys.argv) > 1:
for arg in sys.argv[1:]:
if ':' in arg:
ip, port = arg.split(':')
port = int(port)
break
if '.' in arg:
ip = arg
if arg.isdigit():
port = int(arg)
print('FluCalc started on {ip}:{port}'.format(ip=ip, port=port))
subprocess.run('gunicorn -w {workers_count} -b {ip}:{port} flucalc.server:app'.format(
workers_count=workers_count, ip=ip, port=port
), shell=True)
if __name__ == '__main__':
main()
|
# ... existing code ...
print('FluCalc started on {ip}:{port}'.format(ip=ip, port=port))
subprocess.run('gunicorn -w {workers_count} -b {ip}:{port} flucalc.server:app'.format(
# ... rest of the code ...
|
f6ddd5c4d79ada59d9db4b467849d9b52c5fef75
|
landlab/field/__init__.py
|
landlab/field/__init__.py
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
Add GraphFields to package import.
|
Add GraphFields to package import.
|
Python
|
mit
|
cmshobe/landlab,cmshobe/landlab,cmshobe/landlab,RondaStrauch/landlab,amandersillinois/landlab,RondaStrauch/landlab,landlab/landlab,Carralex/landlab,RondaStrauch/landlab,landlab/landlab,amandersillinois/landlab,csherwood-usgs/landlab,Carralex/landlab,Carralex/landlab,csherwood-usgs/landlab,landlab/landlab
|
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
+ from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
- 'FieldError', 'GroupError', 'GroupSizeError']
+ 'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
Add GraphFields to package import.
|
## Code Before:
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
## Instruction:
Add GraphFields to package import.
## Code After:
from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
|
...
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
...
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
...
|
d3a9657b7318327a59c3eee08a25f1e5c4ba4edf
|
django_casscache.py
|
django_casscache.py
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
Add a method to noop the make_key in Django
|
Add a method to noop the make_key in Django
|
Python
|
bsd-3-clause
|
mattrobenolt/django-casscache
|
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
+
+ def noop_make_key(key, *args, **kwargs):
+ """
+ For use with KEY_FUNCTION, to not alter the key name at all.
+ """
+ return key
+
|
Add a method to noop the make_key in Django
|
## Code Before:
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
## Instruction:
Add a method to noop the make_key in Django
## Code After:
from django.core.cache.backends.memcached import BaseMemcachedCache
class CasscacheCache(BaseMemcachedCache):
"An implementation of a cache binding using casscache"
def __init__(self, server, params):
import casscache
super(CasscacheCache, self).__init__(server, params,
library=casscache,
value_not_found_exception=ValueError)
@property
def _cache(self):
if getattr(self, '_client', None) is None:
keyspace = self._options.pop('keyspace')
columnfamily = self._options.pop('columnfamily')
self._client = self._lib.Client(self._servers,
keyspace=keyspace,
columnfamily=columnfamily,
**self._options)
return self._client
def _get_memcache_timeout(self, timeout):
return timeout or 0
def close(self, **kwargs):
# Lol, Django wants to close the connection after every request.
# This is 100% not needed for Cassandra.
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
|
# ... existing code ...
pass
def noop_make_key(key, *args, **kwargs):
"""
For use with KEY_FUNCTION, to not alter the key name at all.
"""
return key
# ... rest of the code ...
|
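Note: for reference, Django's stock key function prefixes and versions every key; `noop_make_key` opts out so keys reach the backend unmodified. A side-by-side sketch — the default shown here re-implements Django's long-standing `default_key_func` format and should be treated as illustrative:
def default_make_key(key, key_prefix, version):
    # Illustrative re-implementation of Django's default KEY_FUNCTION behaviour.
    return '%s:%s:%s' % (key_prefix, version, key)
def noop_make_key(key, *args, **kwargs):
    return key
assert default_make_key('user:42', 'site', 1) == 'site:1:user:42'
assert noop_make_key('user:42', 'site', 1) == 'user:42'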
dd171296a980dcc0349cf54b2afd6d2399cfb981
|
numba/tests/matmul_usecase.py
|
numba/tests/matmul_usecase.py
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
Fix test failure on Numpy 1.9 and Python 3.5
|
Fix test failure on Numpy 1.9 and Python 3.5
The "@" operator between arrays is only supported by Numpy 1.10+.
|
Python
|
bsd-2-clause
|
numba/numba,cpcloud/numba,stuartarchibald/numba,numba/numba,stefanseefeld/numba,gmarkall/numba,sklam/numba,stefanseefeld/numba,stefanseefeld/numba,jriehl/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,cpcloud/numba,sklam/numba,cpcloud/numba,stefanseefeld/numba,sklam/numba,seibert/numba,gmarkall/numba,sklam/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,stonebig/numba,stonebig/numba,stuartarchibald/numba,seibert/numba,seibert/numba,jriehl/numba,stuartarchibald/numba,stonebig/numba,numba/numba,IntelLabs/numba,jriehl/numba,numba/numba,sklam/numba,gmarkall/numba,cpcloud/numba,cpcloud/numba,stonebig/numba,jriehl/numba,stefanseefeld/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,gmarkall/numba
|
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
+ from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
- has_matmul = sys.version_info >= (3, 5)
+ # It is only supported by Numpy 1.10+.
+ has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
+ has_matmul,
- has_matmul, "the matrix multiplication operator needs Python 3.5+")
+ "the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
Fix test failure on Numpy 1.9 and Python 3.5
|
## Code Before:
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
# The "@" operator only compiles on Python 3.5+.
has_matmul = sys.version_info >= (3, 5)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul, "the matrix multiplication operator needs Python 3.5+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
## Instruction:
Fix test failure on Numpy 1.9 and Python 3.5
## Code After:
import sys
try:
import scipy.linalg.cython_blas
has_blas = True
except ImportError:
has_blas = False
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
if has_matmul:
code = """if 1:
def matmul_usecase(x, y):
return x @ y
def imatmul_usecase(x, y):
x @= y
return x
"""
co = compile(code, "<string>", "exec")
ns = {}
eval(co, globals(), ns)
globals().update(ns)
del code, co, ns
else:
matmul_usecase = None
imatmul_usecase = None
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
needs_blas = unittest.skipUnless(has_blas, "BLAS needs Scipy 0.16+")
class DumbMatrix(object):
def __init__(self, value):
self.value = value
def __matmul__(self, other):
if isinstance(other, DumbMatrix):
return DumbMatrix(self.value * other.value)
return NotImplemented
def __imatmul__(self, other):
if isinstance(other, DumbMatrix):
self.value *= other.value
return self
return NotImplemented
|
// ... existing code ...
import numba.unittest_support as unittest
from numba.numpy_support import version as numpy_version
// ... modified code ...
# The "@" operator only compiles on Python 3.5+.
# It is only supported by Numpy 1.10+.
has_matmul = sys.version_info >= (3, 5) and numpy_version >= (1, 10)
...
needs_matmul = unittest.skipUnless(
has_matmul,
"the matrix multiplication operator needs Python 3.5+ and Numpy 1.10+")
// ... rest of the code ...
|
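Note: the string-plus-`compile` dance in the record exists so the module still imports on interpreters whose parser rejects `@`; the fix merely tightens the gate to also require Numpy 1.10+. The mechanism in isolation (the guarded branch runs on Python 3.5+):
import sys
ns = {}
if sys.version_info >= (3, 5):
    # New syntax stays inside a string, so older parsers never see it.
    code = "def matmul_usecase(x, y):\n    return x @ y\n"
    exec(compile(code, '<string>', 'exec'), ns)
class DumbMatrix:
    def __init__(self, value):
        self.value = value
    def __matmul__(self, other):
        return DumbMatrix(self.value * other.value)
if 'matmul_usecase' in ns:
    assert ns['matmul_usecase'](DumbMatrix(2), DumbMatrix(3)).value == 6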
4987b2e5a2d5ee208a274702f6b88a9021149c86
|
tests/blueprints/user_message/test_address_formatting.py
|
tests/blueprints/user_message/test_address_formatting.py
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
Speed up user message address formatting test
|
Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
- def test_recipient_formatting(application, params):
+ def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
-
- create_email_config()
-
- brand = create_brand()
- party = create_party(brand.id)
-
- site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
- @pytest.fixture
- def application(db):
+ @pytest.fixture(scope='module')
+ def site(db):
with app_context():
with database_recreated(db):
- yield
+ create_email_config()
+ brand = create_brand()
+ party = create_party(brand.id)
+
+ site = create_site(party.id)
+
+ yield site
+
|
Speed up user message address formatting test
|
## Code Before:
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
## Instruction:
Speed up user message address formatting test
## Code After:
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', '[email protected]', 'Alice <[email protected]>'),
('<AngleInvestor>', '[email protected]', '"<AngleInvestor>" <[email protected]>'),
('-=]YOLO[=-', '[email protected]', '"-=]YOLO[=-" <[email protected]>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
// ... existing code ...
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
// ... modified code ...
@pytest.fixture(scope='module')
def site(db):
with app_context():
...
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
// ... rest of the code ...
|
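Note: the speedup in the record above comes purely from pytest fixture scoping — a `scope='module'` fixture runs its body once per module rather than once per test. A toy module demonstrating the difference (run with pytest):
import pytest
setup_calls = []
@pytest.fixture(scope='module')
def site():
    setup_calls.append(1)  # imagine brand/party/site creation here
    yield 'site'
def test_first(site):
    assert site == 'site'
def test_second(site):
    # The expensive set-up ran exactly once for the whole module.
    assert len(setup_calls) == 1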
d4ef63250075dbbefbeed4bb37e8679f1ae2495f
|
tms/__init__.py
|
tms/__init__.py
|
from tms.workday import WorkDay
from tms.scraper import scraper
from tms.workweek import WorkWeek
|
from tms.workday import WorkDay
from tms.scraper import scraper
from tms.workweek import WorkWeek
from tms.breakrule import BreakRule
|
Change to account for the move of the breakrule call
|
Change to account for the move of the breakrule call
|
Python
|
mit
|
marmstr93ng/TimeManagementSystem,marmstr93ng/TimeManagementSystem
|
from tms.workday import WorkDay
from tms.scraper import scraper
from tms.workweek import WorkWeek
+ from tms.breakrule import BreakRule
|
Change to account for the move of the breakrule call
|
## Code Before:
from tms.workday import WorkDay
from tms.scraper import scraper
from tms.workweek import WorkWeek
## Instruction:
Change to account for the move of the breakrule call
## Code After:
from tms.workday import WorkDay
from tms.scraper import scraper
from tms.workweek import WorkWeek
from tms.breakrule import BreakRule
|
...
from tms.workweek import WorkWeek
from tms.breakrule import BreakRule
...
|
c201fc0feef5f7eeede327d6239fc3082ae24180
|
server/worker/queue.py
|
server/worker/queue.py
|
"""Process queues."""
from datetime import datetime
from server.extensions import db
from server.models import QueueEntry
def finished_entries():
"""Process finished entries."""
queue_entries = db.session.query(QueueEntry) \
.filter(QueueEntry.finishes_at <= datetime.now()) \
.all()
for entry in queue_entries:
if entry.module:
entry.module.level = entry.level
db.session.add(entry.module)
entry.module.pod.update_resources()
elif entry.research:
entry.research.level += 1
entry.research.researched = True
db.session.add(entry.research)
queue = entry.queue
db.session.delete(entry)
queue = queue.next_entry()
db.session.commit()
|
"""Process queues."""
from datetime import datetime
from server.extensions import db
from server.models import QueueEntry
def finished_entries():
"""Process finished entries."""
queue_entries = db.session.query(QueueEntry) \
.filter(QueueEntry.finishes_at <= datetime.now()) \
.all()
for entry in queue_entries:
if entry.module:
entry.module.level = entry.level
db.session.add(entry.module)
entry.module.pod.update_resources()
elif entry.research:
entry.research.level = entry.level
entry.research.researched = True
db.session.add(entry.research)
queue = entry.queue
db.session.delete(entry)
queue = queue.next_entry()
db.session.commit()
|
Set research level in ticker
|
Set research level in ticker
|
Python
|
mit
|
Nukesor/spacesurvival,Nukesor/spacesurvival,Nukesor/spacesurvival,Nukesor/spacesurvival
|
"""Process queues."""
from datetime import datetime
from server.extensions import db
from server.models import QueueEntry
def finished_entries():
"""Process finished entries."""
queue_entries = db.session.query(QueueEntry) \
.filter(QueueEntry.finishes_at <= datetime.now()) \
.all()
for entry in queue_entries:
if entry.module:
entry.module.level = entry.level
db.session.add(entry.module)
entry.module.pod.update_resources()
elif entry.research:
- entry.research.level += 1
+ entry.research.level = entry.level
entry.research.researched = True
db.session.add(entry.research)
queue = entry.queue
db.session.delete(entry)
queue = queue.next_entry()
db.session.commit()
|
Set research level in ticker
|
## Code Before:
"""Process queues."""
from datetime import datetime
from server.extensions import db
from server.models import QueueEntry
def finished_entries():
"""Process finished entries."""
queue_entries = db.session.query(QueueEntry) \
.filter(QueueEntry.finishes_at <= datetime.now()) \
.all()
for entry in queue_entries:
if entry.module:
entry.module.level = entry.level
db.session.add(entry.module)
entry.module.pod.update_resources()
elif entry.research:
entry.research.level += 1
entry.research.researched = True
db.session.add(entry.research)
queue = entry.queue
db.session.delete(entry)
queue = queue.next_entry()
db.session.commit()
## Instruction:
Set research level in ticker
## Code After:
"""Process queues."""
from datetime import datetime
from server.extensions import db
from server.models import QueueEntry
def finished_entries():
"""Process finished entries."""
queue_entries = db.session.query(QueueEntry) \
.filter(QueueEntry.finishes_at <= datetime.now()) \
.all()
for entry in queue_entries:
if entry.module:
entry.module.level = entry.level
db.session.add(entry.module)
entry.module.pod.update_resources()
elif entry.research:
entry.research.level = entry.level
entry.research.researched = True
db.session.add(entry.research)
queue = entry.queue
db.session.delete(entry)
queue = queue.next_entry()
db.session.commit()
|
# ... existing code ...
elif entry.research:
entry.research.level = entry.level
entry.research.researched = True
# ... rest of the code ...
|
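Note: the substance of the fix above — assigning the queued target level is idempotent, while `+= 1` silently assumes each entry is processed exactly once. A toy illustration:
class Research:
    def __init__(self):
        self.level = 0
entry_level = 3
research = Research()
for _ in range(2):                 # ticker accidentally sees the entry twice
    research.level = entry_level   # idempotent: always lands on the target
assert research.level == 3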
6eacc0a0736edc410c011003986831b5f58da03e
|
tests/command_test.py
|
tests/command_test.py
|
import os, sys
import unittest
class SqliteTest(unittest.TestCase):
TESTING_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testing_dir'))
def test_command(self):
command = 'diary generate sqlite %s/log.sqlite3' % self.TESTING_DIR
os.system(command)
self.assertTrue(os.path.exists(os.path.join(self.TESTING_DIR, 'log.sqlite3')))
def test_command_no_path_given(self):
command = 'diary generate sqlite'
os.system(command)
target_path = os.path.join(os.getcwd(), 'log.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_specific_name_given(self):
command = 'diary generate sqlite mylog.sqlite3'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
if __name__ == '__main__':
unittest.main()
|
import os, sys
import unittest
class SqliteTest(unittest.TestCase):
TESTING_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testing_dir'))
def test_command(self):
command = 'diary generate sqlite %s/log.sqlite3' % self.TESTING_DIR
os.system(command)
self.assertTrue(os.path.exists(os.path.join(self.TESTING_DIR, 'log.sqlite3')))
def test_command_no_path_given(self):
command = 'diary generate sqlite'
os.system(command)
target_path = os.path.join(os.getcwd(), 'log.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_specific_name_given(self):
command = 'diary generate sqlite mylog.sqlite3'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_weird_extension(self):
command = 'diary generate sqlite mylog.diary.log'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.diary.log')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
if __name__ == '__main__':
unittest.main()
|
Add test for odd extension to diary command
|
Add test for odd extension to diary command
|
Python
|
mit
|
GreenVars/diary
|
import os, sys
import unittest
class SqliteTest(unittest.TestCase):
TESTING_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testing_dir'))
def test_command(self):
command = 'diary generate sqlite %s/log.sqlite3' % self.TESTING_DIR
os.system(command)
self.assertTrue(os.path.exists(os.path.join(self.TESTING_DIR, 'log.sqlite3')))
def test_command_no_path_given(self):
command = 'diary generate sqlite'
os.system(command)
target_path = os.path.join(os.getcwd(), 'log.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_specific_name_given(self):
command = 'diary generate sqlite mylog.sqlite3'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
+ def test_command_weird_extension(self):
+ command = 'diary generate sqlite mylog.diary.log'
+ os.system(command)
+ target_path = os.path.join(os.getcwd(), 'mylog.diary.log')
+ self.assertTrue(os.path.exists(target_path))
+ os.remove(target_path)
if __name__ == '__main__':
unittest.main()
|
Add test for odd extension to diary command
|
## Code Before:
import os, sys
import unittest
class SqliteTest(unittest.TestCase):
TESTING_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testing_dir'))
def test_command(self):
command = 'diary generate sqlite %s/log.sqlite3' % self.TESTING_DIR
os.system(command)
self.assertTrue(os.path.exists(os.path.join(self.TESTING_DIR, 'log.sqlite3')))
def test_command_no_path_given(self):
command = 'diary generate sqlite'
os.system(command)
target_path = os.path.join(os.getcwd(), 'log.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_specific_name_given(self):
command = 'diary generate sqlite mylog.sqlite3'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add test for odd extension to diary command
## Code After:
import os, sys
import unittest
class SqliteTest(unittest.TestCase):
TESTING_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), 'testing_dir'))
def test_command(self):
command = 'diary generate sqlite %s/log.sqlite3' % self.TESTING_DIR
os.system(command)
self.assertTrue(os.path.exists(os.path.join(self.TESTING_DIR, 'log.sqlite3')))
def test_command_no_path_given(self):
command = 'diary generate sqlite'
os.system(command)
target_path = os.path.join(os.getcwd(), 'log.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_specific_name_given(self):
command = 'diary generate sqlite mylog.sqlite3'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.sqlite3')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
def test_command_weird_extension(self):
command = 'diary generate sqlite mylog.diary.log'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.diary.log')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
if __name__ == '__main__':
unittest.main()
|
...
def test_command_weird_extension(self):
command = 'diary generate sqlite mylog.diary.log'
os.system(command)
target_path = os.path.join(os.getcwd(), 'mylog.diary.log')
self.assertTrue(os.path.exists(target_path))
os.remove(target_path)
...
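
A side note on these tests: all three shell out through os.system, which discards
the command's exit status, so a failing `diary generate` run only surfaces via the
missing-file assertion. A stricter variant is sketched below (an illustration, not
part of the repository; assumes Python 3.5+ for subprocess.run and the `diary` CLI
on PATH):

import os
import subprocess
import unittest

class StrictSqliteTest(unittest.TestCase):
    def test_command_weird_extension(self):
        # check=True raises CalledProcessError on a non-zero exit status
        subprocess.run(['diary', 'generate', 'sqlite', 'mylog.diary.log'],
                       check=True)
        target_path = os.path.join(os.getcwd(), 'mylog.diary.log')
        self.assertTrue(os.path.exists(target_path))
        os.remove(target_path)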
|
6785f6ef2287bc161085bcca7f1cb8653b88a433
|
resolwe/flow/management/commands/cleantestdir.py
|
resolwe/flow/management/commands/cleantestdir.py
|
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if volume_config["config"].get("read_only", False) == False
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if not volume_config["config"].get("read_only", False)
and volume_config["type"] == "host_path"
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
Clean only volumes of type host_path
|
Clean only volumes of type host_path
|
Python
|
apache-2.0
|
genialis/resolwe,genialis/resolwe
|
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
- if volume_config["config"].get("read_only", False) == False
+ if not volume_config["config"].get("read_only", False)
+ and volume_config["type"] == "host_path"
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
Clean only volumes of type host_path
|
## Code Before:
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if volume_config["config"].get("read_only", False) == False
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
## Instruction:
Clean only volumes of type host_path
## Code After:
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if not volume_config["config"].get("read_only", False)
and volume_config["type"] == "host_path"
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
# ... existing code ...
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if not volume_config["config"].get("read_only", False)
and volume_config["type"] == "host_path"
]
# ... rest of the code ...
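
The added type check means read-write volumes backed by anything other than a host
path (persistent volume claims, for instance) are now skipped. A minimal standalone
sketch of the same selection logic, using a made-up FLOW_VOLUMES mapping rather
than Resolwe's real settings:

FLOW_VOLUMES = {
    "processing": {"type": "host_path", "config": {"path": "/tmp/proc"}},
    "secrets": {"type": "host_path",
                "config": {"path": "/tmp/sec", "read_only": True}},
    "shared": {"type": "persistent_volume", "config": {"path": "/data"}},
}

cleanable = [
    cfg["config"]["path"]
    for cfg in FLOW_VOLUMES.values()
    if not cfg["config"].get("read_only", False) and cfg["type"] == "host_path"
]
print(cleanable)  # ['/tmp/proc'] - read-only and non-host_path volumes drop out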
|
66ba9aa2172fbed67b67a06acb331d449d32a33c
|
tests/services/shop/conftest.py
|
tests/services/shop/conftest.py
|
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer():
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
Remove unused fixture from orderer
|
Remove unused fixture from orderer
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
- def orderer(normal_user):
+ def orderer():
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
Remove unused fixture from orderer
|
## Code Before:
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer(normal_user):
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
## Instruction:
Remove unused fixture from orderer
## Code After:
import pytest
from byceps.services.shop.cart.models import Cart
from byceps.services.shop.sequence import service as sequence_service
from byceps.services.shop.shop import service as shop_service
from testfixtures.shop_order import create_orderer
from tests.helpers import create_user_with_detail
@pytest.fixture
def shop(email_config):
return shop_service.create_shop('shop-01', 'Some Shop', email_config.id)
@pytest.fixture
def orderer():
user = create_user_with_detail('Besteller')
return create_orderer(user)
@pytest.fixture
def empty_cart() -> Cart:
return Cart()
@pytest.fixture
def order_number_sequence(shop) -> None:
sequence_service.create_order_number_sequence(shop.id, 'order-')
|
...
@pytest.fixture
def orderer():
user = create_user_with_detail('Besteller')
...
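
The mechanics behind this cleanup: naming a fixture as a test parameter forces
pytest to run its setup even when the value is never used, so the unused
normal_user argument was creating a user for nothing. A self-contained
illustration (generic names, runnable under pytest):

import pytest

@pytest.fixture
def expensive_setup():
    # pytest executes this for every test that lists the fixture as a
    # parameter, whether or not the body ever touches the value
    return object()

def test_requests_fixture(expensive_setup):  # setup runs
    assert expensive_setup is not None

def test_skips_fixture():  # setup does not run
    assert True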
|
27d7ab7ecca0d2e6307dbcb1317b486fe77a97d7
|
cyder/core/system/models.py
|
cyder/core/system/models.py
|
from django.db import models
from cyder.base.mixins import ObjectUrlMixin
from cyder.base.models import BaseModel
from cyder.cydhcp.keyvalue.models import KeyValue
class System(BaseModel, ObjectUrlMixin):
name = models.CharField(max_length=255, unique=False)
search_fields = ('name',)
display_fields = ('name', 'pk')
def __str__(self):
return "{0} : {1}".format(*(str(getattr(self, f))
for f in self.display_fields))
class Meta:
db_table = 'system'
def details(self):
"""For tables."""
data = super(System, self).details()
data['data'] = [
('Name', 'name', self),
]
return data
@staticmethod
def eg_metadata():
"""EditableGrid metadata."""
return {'metadata': [
{'name': 'name', 'datatype': 'string', 'editable': True},
]}
class SystemKeyValue(KeyValue):
system = models.ForeignKey(System, null=False)
class Meta:
db_table = 'system_key_value'
unique_together = ('key', 'value', 'system')
|
from django.db import models
from cyder.base.mixins import ObjectUrlMixin
from cyder.base.models import BaseModel
from cyder.base.helpers import get_display
from cyder.cydhcp.keyvalue.models import KeyValue
class System(BaseModel, ObjectUrlMixin):
name = models.CharField(max_length=255, unique=False)
search_fields = ('name',)
display_fields = ('name',)
def __str__(self):
return get_display(self)
class Meta:
db_table = 'system'
def details(self):
"""For tables."""
data = super(System, self).details()
data['data'] = [
('Name', 'name', self),
]
return data
@staticmethod
def eg_metadata():
"""EditableGrid metadata."""
return {'metadata': [
{'name': 'name', 'datatype': 'string', 'editable': True},
]}
class SystemKeyValue(KeyValue):
system = models.ForeignKey(System, null=False)
class Meta:
db_table = 'system_key_value'
unique_together = ('key', 'value', 'system')
|
Revert system names to normal
|
Revert system names to normal
|
Python
|
bsd-3-clause
|
drkitty/cyder,OSU-Net/cyder,drkitty/cyder,murrown/cyder,zeeman/cyder,OSU-Net/cyder,zeeman/cyder,murrown/cyder,akeym/cyder,akeym/cyder,akeym/cyder,zeeman/cyder,drkitty/cyder,OSU-Net/cyder,zeeman/cyder,murrown/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder
|
from django.db import models
from cyder.base.mixins import ObjectUrlMixin
from cyder.base.models import BaseModel
+ from cyder.base.helpers import get_display
from cyder.cydhcp.keyvalue.models import KeyValue
class System(BaseModel, ObjectUrlMixin):
name = models.CharField(max_length=255, unique=False)
search_fields = ('name',)
- display_fields = ('name', 'pk')
+ display_fields = ('name',)
def __str__(self):
+ return get_display(self)
- return "{0} : {1}".format(*(str(getattr(self, f))
- for f in self.display_fields))
class Meta:
db_table = 'system'
def details(self):
"""For tables."""
data = super(System, self).details()
data['data'] = [
('Name', 'name', self),
]
return data
@staticmethod
def eg_metadata():
"""EditableGrid metadata."""
return {'metadata': [
{'name': 'name', 'datatype': 'string', 'editable': True},
]}
class SystemKeyValue(KeyValue):
system = models.ForeignKey(System, null=False)
class Meta:
db_table = 'system_key_value'
unique_together = ('key', 'value', 'system')
|
Revert system names to normal
|
## Code Before:
from django.db import models
from cyder.base.mixins import ObjectUrlMixin
from cyder.base.models import BaseModel
from cyder.cydhcp.keyvalue.models import KeyValue
class System(BaseModel, ObjectUrlMixin):
name = models.CharField(max_length=255, unique=False)
search_fields = ('name',)
display_fields = ('name', 'pk')
def __str__(self):
return "{0} : {1}".format(*(str(getattr(self, f))
for f in self.display_fields))
class Meta:
db_table = 'system'
def details(self):
"""For tables."""
data = super(System, self).details()
data['data'] = [
('Name', 'name', self),
]
return data
@staticmethod
def eg_metadata():
"""EditableGrid metadata."""
return {'metadata': [
{'name': 'name', 'datatype': 'string', 'editable': True},
]}
class SystemKeyValue(KeyValue):
system = models.ForeignKey(System, null=False)
class Meta:
db_table = 'system_key_value'
unique_together = ('key', 'value', 'system')
## Instruction:
Revert system names to normal
## Code After:
from django.db import models
from cyder.base.mixins import ObjectUrlMixin
from cyder.base.models import BaseModel
from cyder.base.helpers import get_display
from cyder.cydhcp.keyvalue.models import KeyValue
class System(BaseModel, ObjectUrlMixin):
name = models.CharField(max_length=255, unique=False)
search_fields = ('name',)
display_fields = ('name',)
def __str__(self):
return get_display(self)
class Meta:
db_table = 'system'
def details(self):
"""For tables."""
data = super(System, self).details()
data['data'] = [
('Name', 'name', self),
]
return data
@staticmethod
def eg_metadata():
"""EditableGrid metadata."""
return {'metadata': [
{'name': 'name', 'datatype': 'string', 'editable': True},
]}
class SystemKeyValue(KeyValue):
system = models.ForeignKey(System, null=False)
class Meta:
db_table = 'system_key_value'
unique_together = ('key', 'value', 'system')
|
// ... existing code ...
from cyder.base.models import BaseModel
from cyder.base.helpers import get_display
from cyder.cydhcp.keyvalue.models import KeyValue
// ... modified code ...
search_fields = ('name',)
display_fields = ('name',)
...
def __str__(self):
return get_display(self)
// ... rest of the code ...
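
The body of get_display is not part of this record, so the following is only a
plausible reading of what cyder.base.helpers.get_display might do, inferred from
the display_fields convention it replaces (hypothetical sketch; the real helper
may differ):

def get_display(obj):
    # join the values of the fields each model lists in display_fields
    return ' '.join(str(getattr(obj, f)) for f in obj.display_fields)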
|
a5cf50da81460ab68063689f3e2cadb5db18a3d8
|
common/candle_keras/__init__.py
|
common/candle_keras/__init__.py
|
from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
|
from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
|
Split multiple arguments for consistency
|
Split multiple arguments for consistency
|
Python
|
mit
|
ECP-CANDLE/Benchmarks,ECP-CANDLE/Benchmarks,ECP-CANDLE/Benchmarks
|
from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
- from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
+ from solr_keras import CandleRemoteMonitor
+ from solr_keras import compute_trainable_params
+ from solr_keras import TerminateOnTimeOut
|
Split multiple arguments for consistency
|
## Code Before:
from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor, compute_trainable_params, TerminateOnTimeOut
## Instruction:
Split multiple arguments for consistency
## Code After:
from __future__ import absolute_import
#__version__ = '0.0.0'
#import from data_utils
from data_utils import load_csv_data
from data_utils import load_Xy_one_hot_data2
from data_utils import load_Xy_data_noheader
#import from file_utils
from file_utils import get_file
#import from default_utils
from default_utils import ArgumentStruct
from default_utils import Benchmark
from default_utils import str2bool
from default_utils import initialize_parameters
from default_utils import fetch_file
from default_utils import verify_path
from default_utils import keras_default_config
from default_utils import set_up_logger
#import from keras_utils
#from keras_utils import dense
#from keras_utils import add_dense
from keras_utils import build_initializer
from keras_utils import build_optimizer
from keras_utils import set_seed
from keras_utils import set_parallelism_threads
from generic_utils import Progbar
from generic_utils import LoggingCallback
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
|
// ... existing code ...
from solr_keras import CandleRemoteMonitor
from solr_keras import compute_trainable_params
from solr_keras import TerminateOnTimeOut
// ... rest of the code ...
|
982f4af638e83ee49c87a0dffad2b47daf872749
|
workers/data_refinery_workers/downloaders/test_utils.py
|
workers/data_refinery_workers/downloaders/test_utils.py
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
Add tag to downloaders test so it is actually run.
|
Add tag to downloaders test so it is actually run.
|
Python
|
bsd-3-clause
|
data-refinery/data_refinery,data-refinery/data_refinery,data-refinery/data_refinery
|
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
+ @tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
Add tag to downloaders test so it is actually run.
|
## Code Before:
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
## Instruction:
Add tag to downloaders test so it is actually run.
## Code After:
import os
from django.test import TestCase, tag
from typing import List
from unittest.mock import patch, call
from urllib.error import URLError
from data_refinery_workers.downloaders import utils
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
"""Make sure this function doesn't raise an exception with no files."""
create_processor_job_for_original_files([])
self.assertTrue(True)
|
# ... existing code ...
class UtilsTestCase(TestCase):
@tag('downloaders')
def test_no_jobs_to_create(self):
# ... rest of the code ...
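
Two observations on this record. First, the tag only has an effect when the runner
filters on it; with Django's built-in runner that is the --tag flag, so an
invocation along these lines now picks the test up (assuming a standard manage.py
layout):

    python manage.py test --tag=downloaders          # only tagged tests
    python manage.py test --exclude-tag=downloaders  # everything else

Second, as written the test body still calls
create_processor_job_for_original_files unqualified even though only the utils
module is imported, so running it would raise NameError; presumably
utils.create_processor_job_for_original_files was intended.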
|
27065fd302c20937d44b840472d943ce8aa652e7
|
plugins/candela/girder_plugin_candela/__init__.py
|
plugins/candela/girder_plugin_candela/__init__.py
|
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
|
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
|
Add a plugin displayName property
|
Add a plugin displayName property
This allows the web client to display an arbitrary plugin title rather
than being restricted to valid Python/JavaScript tokens.
|
Python
|
apache-2.0
|
Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela
|
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
+ DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
|
Add a plugin displayName property
|
## Code Before:
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
## Instruction:
Add a plugin displayName property
## Code After:
from girder.plugin import GirderPlugin
class CandelaPlugin(GirderPlugin):
DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
def load(self, info):
pass
|
# ... existing code ...
class CandelaPlugin(GirderPlugin):
DISPLAY_NAME = 'Candela Visualization'
NPM_PACKAGE_NAME = '@girder/candela'
# ... rest of the code ...
|
057510c78f80c3592c562006413049ab1292d0a3
|
ipaqe_provision_hosts/backend/base.py
|
ipaqe_provision_hosts/backend/base.py
|
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
|
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
|
Replace the exceptions in backend classes
|
Replace the exceptions in backend classes
|
Python
|
mit
|
apophys/ipaqe-provision-hosts
|
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
+ from ipaqe_provision_hosts.errors import IPAQEProvisionerError
-
- class IDMBackendException(Exception):
- pass
+ class VMsNotCreatedError(IPAQEProvisionerError):
- class VMsNotCreatedError(IDMBackendException):
- pass
-
- class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
|
Replace the exceptions in backend classes
|
## Code Before:
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
class IDMBackendException(Exception):
pass
class VMsNotCreatedError(IDMBackendException):
pass
class IDMBackendMissingName(IDMBackendException):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
## Instruction:
Replace the exceptions in backend classes
## Code After:
NOT_IMPLEMENTED_MSG = "You need to override this method in a subclass"
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
class VMsNotCreatedError(IPAQEProvisionerError):
pass
class IDMBackendBase(object):
"""IDMBackendBase class
This class represents a contract between the
idm-prepare-hosts utility and a backend implementation.
"""
def __init__(self, config=None):
self._config = config or {}
self._vms = []
@property
def vms(self):
"""The attribute returns a list of host entries"""
if not self._vms:
raise VMsNotCreatedError("No VMs were provisioned yet")
else:
return self._vms
def provision_resources(self, vm_count):
"""Provision the hosts in a backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
def delete_resources(self):
"""Delete the resources provisioned by the backend"""
raise NotImplementedError(NOT_IMPLEMENTED_MSG)
|
// ... existing code ...
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
// ... modified code ...
class VMsNotCreatedError(IPAQEProvisionerError):
pass
// ... rest of the code ...
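
Collapsing the backend-specific exception classes into one subclass of the
project-wide IPAQEProvisionerError lets callers catch a single base type. A usage
sketch (backend and vm_count are illustrative names, not from the record):

from ipaqe_provision_hosts.errors import IPAQEProvisionerError

def provision_or_report(backend, vm_count):
    try:
        backend.provision_resources(vm_count)
    except IPAQEProvisionerError as exc:
        # one handler now covers VMsNotCreatedError alongside every other
        # error type derived from the shared base
        print('provisioning failed: %s' % exc)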
|
1e10fa30998f63359ddd26d9804bd32a837c2cab
|
armstrong/esi/tests/_utils.py
|
armstrong/esi/tests/_utils.py
|
from django.conf import settings
from django.test import TestCase as DjangoTestCase
import fudge
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
|
from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
import fudge
def with_fake_request(func):
def inner(self, *args, **kwargs):
request = fudge.Fake(HttpRequest)
fudge.clear_calls()
result = func(self, request, *args, **kwargs)
fudge.verify()
fudge.clear_expectations()
return result
return inner
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
|
Add in a decorator for generating fake request objects for test cases
|
Add in a decorator for generating fake request objects for test cases
|
Python
|
bsd-3-clause
|
armstrong/armstrong.esi
|
from django.conf import settings
+ from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
import fudge
+
+ def with_fake_request(func):
+ def inner(self, *args, **kwargs):
+ request = fudge.Fake(HttpRequest)
+ fudge.clear_calls()
+
+ result = func(self, request, *args, **kwargs)
+
+ fudge.verify()
+ fudge.clear_expectations()
+ return result
+ return inner
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
|
Add in a decorator for generating fake request objects for test cases
|
## Code Before:
from django.conf import settings
from django.test import TestCase as DjangoTestCase
import fudge
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
## Instruction:
Add in a decorator for generating fake request objects for test cases
## Code After:
from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
import fudge
def with_fake_request(func):
def inner(self, *args, **kwargs):
request = fudge.Fake(HttpRequest)
fudge.clear_calls()
result = func(self, request, *args, **kwargs)
fudge.verify()
fudge.clear_expectations()
return result
return inner
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
|
// ... existing code ...
from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
// ... modified code ...
import fudge
def with_fake_request(func):
def inner(self, *args, **kwargs):
request = fudge.Fake(HttpRequest)
fudge.clear_calls()
result = func(self, request, *args, **kwargs)
fudge.verify()
fudge.clear_expectations()
return result
return inner
// ... rest of the code ...
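
Usage is positional: the decorator hands the fake request to the wrapped method as
the first argument after self, so a test written against it looks like the
following sketch (hypothetical test class, relying on the imports already in this
module):

class ExampleTest(TestCase):
    @with_fake_request
    def test_receives_fake(self, request):
        # 'request' is the fudge.Fake built around HttpRequest
        self.assertTrue(isinstance(request, fudge.Fake))

One refinement worth considering: wrapping inner with functools.wraps(func) would
preserve the wrapped test's name, which test runners show in their output.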
|
d1e2aacb7926a7e751cd27eb562b2c5d86f7e1e8
|
opal/tests/test_core_test_runner.py
|
opal/tests/test_core_test_runner.py
|
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
Add test for opal test py -t
|
Add test for opal test py -t
|
Python
|
agpl-3.0
|
khchine5/opal,khchine5/opal,khchine5/opal
|
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
+ @patch('subprocess.check_call')
+ def test_run_tests_with_test_arg(self, check_call):
+ mock_args = MagicMock(name="args")
+ mock_args.userland_here = ffs.Path('.')
+ mock_args.coverage = False
+ mock_args.test = 'opal.tests.foo'
+ test_runner._run_py_tests(mock_args)
+ check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
+
+
+
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
Add test for opal test py -t
|
## Code Before:
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
## Instruction:
Add test for opal test py -t
## Code After:
import ffs
from mock import MagicMock, patch
from opal.core.test import OpalTestCase
from opal.core import test_runner
class RunPyTestsTestCase(OpalTestCase):
@patch('subprocess.check_call')
def test_run_tests(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = None
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py'])
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
class RunJSTestsTestCase(OpalTestCase):
pass
class RunTestsTestCase(OpalTestCase):
pass
|
# ... existing code ...
@patch('subprocess.check_call')
def test_run_tests_with_test_arg(self, check_call):
mock_args = MagicMock(name="args")
mock_args.userland_here = ffs.Path('.')
mock_args.coverage = False
mock_args.test = 'opal.tests.foo'
test_runner._run_py_tests(mock_args)
check_call.assert_called_once_with(['python', 'runtests.py', 'opal.tests.foo'])
# ... rest of the code ...
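
Taken together the two tests pin down the command construction: the base
invocation is ['python', 'runtests.py'] and the -t/--test value is appended when
present. A minimal _run_py_tests consistent with both assertions might look like
this sketch (the real opal.core.test_runner also handles coverage and more):

import subprocess

def _run_py_tests(args):
    command = ['python', 'runtests.py']
    if args.test:
        command.append(args.test)
    subprocess.check_call(command)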
|
3747158af790a38ccfce217426ee5261877e9f0e
|
project/api/management/commands/seed_database.py
|
project/api/management/commands/seed_database.py
|
from django.core.management.base import BaseCommand
from api.factories import (
InternationalFactory,
)
class Command(BaseCommand):
help = "Command to seed database."
def handle(self, *args, **options):
InternationalFactory()
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Command to seed database."
from api.factories import (
InternationalFactory,
)
def handle(self, *args, **options):
self.InternationalFactory()
|
Fix seeding in management command
|
Fix seeding in management command
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,dbinetti/barberscore-django,dbinetti/barberscore
|
from django.core.management.base import BaseCommand
-
-
- from api.factories import (
- InternationalFactory,
- )
class Command(BaseCommand):
help = "Command to seed database."
+ from api.factories import (
+ InternationalFactory,
+ )
+
def handle(self, *args, **options):
- InternationalFactory()
+ self.InternationalFactory()
|
Fix seeding in management command
|
## Code Before:
from django.core.management.base import BaseCommand
from api.factories import (
InternationalFactory,
)
class Command(BaseCommand):
help = "Command to seed database."
def handle(self, *args, **options):
InternationalFactory()
## Instruction:
Fix seeding in management command
## Code After:
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Command to seed database."
from api.factories import (
InternationalFactory,
)
def handle(self, *args, **options):
self.InternationalFactory()
|
...
from django.core.management.base import BaseCommand
...
from api.factories import (
InternationalFactory,
)
def handle(self, *args, **options):
self.InternationalFactory()
...
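
Placing the import in the class body is unusual: it binds InternationalFactory as
a class attribute when the class is defined, which is why the call becomes
self.InternationalFactory(). If the intent is simply to defer the factory import
until the command runs (for example to dodge import-time side effects), the more
conventional shape keeps the import local to handle() - a sketch, not the
repository's code:

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Command to seed database."

    def handle(self, *args, **options):
        from api.factories import InternationalFactory  # deferred import
        InternationalFactory()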
|
902b2b0929dad116664d37a13ff325a10b67db7b
|
catalog/queue/sqs.py
|
catalog/queue/sqs.py
|
from Queue import Queue, Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
|
from multiprocessing import Queue
from Queue import Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
|
Use queue from multiprocessing library instead of Queue
|
Use queue from multiprocessing library instead of Queue
|
Python
|
mpl-2.0
|
mozilla/structured-catalog
|
+ from multiprocessing import Queue
- from Queue import Queue, Empty
+ from Queue import Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
|
Use queue from multiprocessing library instead of Queue
|
## Code Before:
from Queue import Queue, Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
## Instruction:
Use queue from multiprocessing library instead of Queue
## Code After:
from multiprocessing import Queue
from Queue import Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
|
# ... existing code ...
from multiprocessing import Queue
from Queue import Empty
import json
# ... rest of the code ...
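
Queue.Queue is thread-local, while multiprocessing.Queue pickles items through a
pipe so separate worker processes can share it - presumably the point of the swap.
Note the Queue.Empty import survives because multiprocessing.Queue.get raises that
same exception when called with block=False. A self-contained demonstration
(standard library only):

from multiprocessing import Process, Queue

def worker(q):
    q.put('handled in a child process')

if __name__ == '__main__':
    q = Queue()
    p = Process(target=worker, args=(q,))
    p.start()
    print(q.get())  # receives the item produced by the child
    p.join()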
|
6e583085ac056b7df2b29a94cd6743493c151684
|
subjectivity_clues/clues.py
|
subjectivity_clues/clues.py
|
import os
import shlex
class Clues:
DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
def __init__(self, filename=DEFAULT_FILENAME):
lines = self.read_all(filename)
self.lexicons = self.parse_clues(lines)
@staticmethod
def read_all(filename):
with open(filename, 'r') as f:
clues = f.readlines()
return clues
@staticmethod
def parse_clues(lines):
clues = dict()
for l in lines:
clue = dict(token.split('=') for token in shlex.split(l))
word = clue['word1']
clues[word] = clue
return clues
if __name__ == '__main__':
c = Clues()
|
import os
import shlex
class Clues:
DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
PRIORPOLARITY = {
'positive': 1,
'negative': -1,
'both': 0,
'neutral': 0
}
TYPE = {
'strongsubj': 2,
'weaksubj': 1
}
def __init__(self, filename=DEFAULT_FILENAME):
lines = self.read_all(filename)
self.lexicons = self.parse_clues(lines)
@staticmethod
def read_all(filename):
with open(filename, 'r') as f:
clues = f.readlines()
return clues
@staticmethod
def parse_clues(lines):
clues = dict()
for l in lines:
clue = dict(token.split('=') for token in shlex.split(l))
word = clue['word1']
clues[word] = clue
return clues
def calculate(self, sentence):
related_words = 0
total_subjectivity = 0
total_priorpolarity = 0
for w in sentence.split(' '):
if w not in self.lexicons.keys():
continue
related_words += 1
total_subjectivity += self.TYPE[self.lexicons[w]['type']]
total_priorpolarity += self.PRIORPOLARITY[self.lexicons[w]['priorpolarity']]
return {
'sentence': sentence,
'related_words': related_words,
'total_subjectivity': total_subjectivity,
'total_priorpolarity': total_priorpolarity
}
if __name__ == '__main__':
c = Clues()
|
Add calculation to the lexicon
|
Add calculation to the lexicon
|
Python
|
apache-2.0
|
chuajiesheng/twitter-sentiment-analysis
|
import os
import shlex
class Clues:
DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
+
+ PRIORPOLARITY = {
+ 'positive': 1,
+ 'negative': -1,
+ 'both': 0,
+ 'neutral': 0
+ }
+
+ TYPE = {
+ 'strongsubj': 2,
+ 'weaksubj': 1
+ }
def __init__(self, filename=DEFAULT_FILENAME):
lines = self.read_all(filename)
self.lexicons = self.parse_clues(lines)
@staticmethod
def read_all(filename):
with open(filename, 'r') as f:
clues = f.readlines()
return clues
@staticmethod
def parse_clues(lines):
clues = dict()
for l in lines:
clue = dict(token.split('=') for token in shlex.split(l))
word = clue['word1']
clues[word] = clue
return clues
+ def calculate(self, sentence):
+ related_words = 0
+ total_subjectivity = 0
+ total_priorpolarity = 0
+
+ for w in sentence.split(' '):
+ if w not in self.lexicons.keys():
+ continue
+
+ related_words += 1
+ total_subjectivity += self.TYPE[self.lexicons[w]['type']]
+ total_priorpolarity += self.PRIORPOLARITY[self.lexicons[w]['priorpolarity']]
+
+ return {
+ 'sentence': sentence,
+ 'related_words': related_words,
+ 'total_subjectivity': total_subjectivity,
+ 'total_priorpolarity': total_priorpolarity
+ }
+
if __name__ == '__main__':
c = Clues()
|
Add calculation to the lexicon
|
## Code Before:
import os
import shlex
class Clues:
DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
def __init__(self, filename=DEFAULT_FILENAME):
lines = self.read_all(filename)
self.lexicons = self.parse_clues(lines)
@staticmethod
def read_all(filename):
with open(filename, 'r') as f:
clues = f.readlines()
return clues
@staticmethod
def parse_clues(lines):
clues = dict()
for l in lines:
clue = dict(token.split('=') for token in shlex.split(l))
word = clue['word1']
clues[word] = clue
return clues
if __name__ == '__main__':
c = Clues()
## Instruction:
Add calculation to the lexicon
## Code After:
import os
import shlex
class Clues:
DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
PRIORPOLARITY = {
'positive': 1,
'negative': -1,
'both': 0,
'neutral': 0
}
TYPE = {
'strongsubj': 2,
'weaksubj': 1
}
def __init__(self, filename=DEFAULT_FILENAME):
lines = self.read_all(filename)
self.lexicons = self.parse_clues(lines)
@staticmethod
def read_all(filename):
with open(filename, 'r') as f:
clues = f.readlines()
return clues
@staticmethod
def parse_clues(lines):
clues = dict()
for l in lines:
clue = dict(token.split('=') for token in shlex.split(l))
word = clue['word1']
clues[word] = clue
return clues
def calculate(self, sentence):
related_words = 0
total_subjectivity = 0
total_priorpolarity = 0
for w in sentence.split(' '):
if w not in self.lexicons.keys():
continue
related_words += 1
total_subjectivity += self.TYPE[self.lexicons[w]['type']]
total_priorpolarity += self.PRIORPOLARITY[self.lexicons[w]['priorpolarity']]
return {
'sentence': sentence,
'related_words': related_words,
'total_subjectivity': total_subjectivity,
'total_priorpolarity': total_priorpolarity
}
if __name__ == '__main__':
c = Clues()
|
# ... existing code ...
DEFAULT_FILENAME = os.getcwd() + os.sep + 'subjectivity_clues' + os.sep + 'subjclueslen1-HLTEMNLP05.tff'
PRIORPOLARITY = {
'positive': 1,
'negative': -1,
'both': 0,
'neutral': 0
}
TYPE = {
'strongsubj': 2,
'weaksubj': 1
}
# ... modified code ...
def calculate(self, sentence):
related_words = 0
total_subjectivity = 0
total_priorpolarity = 0
for w in sentence.split(' '):
if w not in self.lexicons.keys():
continue
related_words += 1
total_subjectivity += self.TYPE[self.lexicons[w]['type']]
total_priorpolarity += self.PRIORPOLARITY[self.lexicons[w]['priorpolarity']]
return {
'sentence': sentence,
'related_words': related_words,
'total_subjectivity': total_subjectivity,
'total_priorpolarity': total_priorpolarity
}
if __name__ == '__main__':
# ... rest of the code ...
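
A quick usage sketch (assumes the MPQA lexicon file subjclueslen1-HLTEMNLP05.tff
is present at the default path and that the example words appear in it):

c = Clues()
result = c.calculate('a great tragic day')
print(result['related_words'])        # tokens that matched lexicon entries
print(result['total_priorpolarity'])  # positive entries add 1, negative subtract 1

Matching is exact and whitespace-based: calculate() splits on single spaces and
does no lowercasing or punctuation stripping, so "Great!" would not match a
lexicon entry for "great".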
|
a17c2ce30f30d0441b1475457b0bc9d04da9f143
|
coil/__init__.py
|
coil/__init__.py
|
"""Coil: A Configuration Library."""
__version__ = "0.2.2"
|
"""Coil: A Configuration Library."""
__version__ = "0.3.0"
from coil.parser import Parser
def parse_file(file_name):
"""Open and parse a coil file.
Returns the root Struct.
"""
coil = open(file_name)
return Parser(coil, file_name).root()
def parse(string):
"""Parse a coil string.
Returns the root Struct.
"""
return Parser(string.splitlines()).root()
|
Add helpers for parsing files and strings
|
Add helpers for parsing files and strings
|
Python
|
mit
|
tectronics/coil,marineam/coil,kovacsbalu/coil,kovacsbalu/coil,marineam/coil,tectronics/coil
|
"""Coil: A Configuration Library."""
- __version__ = "0.2.2"
+ __version__ = "0.3.0"
+ from coil.parser import Parser
+
+ def parse_file(file_name):
+ """Open and parse a coil file.
+
+ Returns the root Struct.
+ """
+ coil = open(file_name)
+ return Parser(coil, file_name).root()
+
+ def parse(string):
+ """Parse a coil string.
+
+ Returns the root Struct.
+ """
+ return Parser(string.splitlines()).root()
+
|
Add helpers for parsing files and strings
|
## Code Before:
"""Coil: A Configuration Library."""
__version__ = "0.2.2"
## Instruction:
Add helpers for parsing files and strings
## Code After:
"""Coil: A Configuration Library."""
__version__ = "0.3.0"
from coil.parser import Parser
def parse_file(file_name):
"""Open and parse a coil file.
Returns the root Struct.
"""
coil = open(file_name)
return Parser(coil, file_name).root()
def parse(string):
"""Parse a coil string.
Returns the root Struct.
"""
return Parser(string.splitlines()).root()
|
# ... existing code ...
__version__ = "0.3.0"
from coil.parser import Parser
def parse_file(file_name):
"""Open and parse a coil file.
Returns the root Struct.
"""
coil = open(file_name)
return Parser(coil, file_name).root()
def parse(string):
"""Parse a coil string.
Returns the root Struct.
"""
return Parser(string.splitlines()).root()
# ... rest of the code ...
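
One caveat in parse_file as written: the file handle is opened but never
explicitly closed, so it lingers until garbage collection. A variant using a
context manager, mirroring the splitlines() behaviour of parse() (a sketch, not
the library's own code):

def parse_file(file_name):
    """Open and parse a coil file, closing the handle promptly."""
    with open(file_name) as coil:
        return Parser(coil.read().splitlines(), file_name).root()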
|
ad6b7fe871be502220de5bcb6c2a65f4e7999294
|
etcd3/client.py
|
etcd3/client.py
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
Add compact and delete stubs
|
Add compact and delete stubs
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
+ def delete(self, key):
+ pass
+
+ def compact(self):
+ pass
+
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
Add compact and delete stubs
|
## Code Before:
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
## Instruction:
Add compact and delete stubs
## Code After:
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
...
def delete(self, key):
pass
def compact(self):
pass
...
|
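A sketch of how the delete stub above might later be filled in, mirroring the get/put pattern already in the class. The DeleteRangeRequest message and the stub's DeleteRange method come from the etcd v3 proto definitions; treat the exact generated names as an assumption.

# Hedged sketch only -- not part of the recorded change above.
def delete(self, key):
    '''Delete a key from etcd (sketch).'''
    delete_request = etcdrpc.DeleteRangeRequest()
    delete_request.key = key.encode('utf-8')
    self.kvstub.DeleteRange(delete_request)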
578fe6f7403de0f93b3ca2776092e5dfe8dbfa73
|
twisted/plugins/docker_xylem_plugin.py
|
twisted/plugins/docker_xylem_plugin.py
|
import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
|
import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
|
Create /run/docker/plugins before using it. (@bearnard)
|
Create /run/docker/plugins before using it. (@bearnard)
|
Python
|
mit
|
praekeltfoundation/docker-xylem,praekeltfoundation/docker-xylem
|
import yaml
from zope.interface import implements
- from twisted.python import usage
+ from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
+ sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
+ if not sockfp.parent().exists():
+ sockfp.parent().makedirs()
- return internet.UNIXServer(config.get(
+ return internet.UNIXServer(
- 'socket', "/run/docker/plugins/xylem.sock"),
+ config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
|
Create /run/docker/plugins before using it. (@bearnard)
|
## Code Before:
import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
## Instruction:
Create /run/docker/plugins before using it. (@bearnard)
## Code After:
import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
|
# ... existing code ...
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
# ... modified code ...
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
# ... rest of the code ...
|
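The directory-creation guard in the change above is plain twisted.python.filepath usage; a standalone demonstration follows, with a /tmp path substituted for /run so the sketch runs without root.

from twisted.python import filepath

sockfp = filepath.FilePath("/tmp/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
    sockfp.parent().makedirs()    # creates /tmp/docker/plugins recursively
print(sockfp.parent().path)       # /tmp/docker/plugins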
32a592c82ab0b727c56084063d49039bb693a2b0
|
corehq/apps/reports/commtrack/util.py
|
corehq/apps/reports/commtrack/util.py
|
from corehq.apps.locations.models import all_locations
from corehq.apps.commtrack.models import SupplyPointCase
from corehq.apps.products.models import Product
def supply_point_ids(locations):
keys = [[loc.domain, loc._id] for loc in locations]
rows = SupplyPointCase.get_db().view(
'commtrack/supply_point_by_loc',
keys=keys,
include_docs=False,
)
return [row['id'] for row in rows]
def get_relevant_supply_point_ids(domain, active_location=None):
if active_location:
return supply_point_ids([active_location] + active_location.descendants)
else:
return supply_point_ids(all_locations(domain))
def product_ids_filtered_by_program(domain, program):
products = Product.by_program_id(domain, program, False)
return [p['_id'] for p in products]
|
from corehq.apps.locations.models import SQLLocation
from corehq.apps.commtrack.models import SupplyPointCase
from corehq.apps.products.models import Product
def supply_point_ids(locations):
keys = [[loc.domain, loc._id] for loc in locations]
rows = SupplyPointCase.get_db().view(
'commtrack/supply_point_by_loc',
keys=keys,
include_docs=False,
)
return [row['id'] for row in rows]
def get_relevant_supply_point_ids(domain, active_location=None):
"""
Return a list of supply point ids for the selected location
and all of its descendants OR all supply point ids in the domain.
"""
def filter_relevant(queryset):
return queryset.filter(
supply_point_id__isnull=False
).values_list(
'supply_point_id',
flat=True
)
if active_location:
sql_location = active_location.sql_location
supply_point_ids = []
if sql_location.supply_point_id:
supply_point_ids.append(sql_location.supply_point_id)
supply_point_ids += list(
filter_relevant(sql_location.get_descendants())
)
return supply_point_ids
else:
return filter_relevant(SQLLocation.objects.filter(domain=domain))
def product_ids_filtered_by_program(domain, program):
products = Product.by_program_id(domain, program, False)
return [p['_id'] for p in products]
|
Switch supply point id list lookup to SQL
|
Switch supply point id list lookup to SQL
Locally with 1000 users this took the product_data method from 1.7
seconds to .2 seconds.
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq
|
- from corehq.apps.locations.models import all_locations
+ from corehq.apps.locations.models import SQLLocation
from corehq.apps.commtrack.models import SupplyPointCase
from corehq.apps.products.models import Product
def supply_point_ids(locations):
keys = [[loc.domain, loc._id] for loc in locations]
rows = SupplyPointCase.get_db().view(
'commtrack/supply_point_by_loc',
keys=keys,
include_docs=False,
)
return [row['id'] for row in rows]
def get_relevant_supply_point_ids(domain, active_location=None):
+ """
+ Return a list of supply point ids for the selected location
+ and all of its descendants OR all supply point ids in the domain.
+ """
+ def filter_relevant(queryset):
+ return queryset.filter(
+ supply_point_id__isnull=False
+ ).values_list(
+ 'supply_point_id',
+ flat=True
+ )
+
if active_location:
- return supply_point_ids([active_location] + active_location.descendants)
+ sql_location = active_location.sql_location
+ supply_point_ids = []
+ if sql_location.supply_point_id:
+ supply_point_ids.append(sql_location.supply_point_id)
+ supply_point_ids += list(
+ filter_relevant(sql_location.get_descendants())
+ )
+
+ return supply_point_ids
else:
- return supply_point_ids(all_locations(domain))
+ return filter_relevant(SQLLocation.objects.filter(domain=domain))
def product_ids_filtered_by_program(domain, program):
products = Product.by_program_id(domain, program, False)
return [p['_id'] for p in products]
|
Switch supply point id list lookup to SQL
|
## Code Before:
from corehq.apps.locations.models import all_locations
from corehq.apps.commtrack.models import SupplyPointCase
from corehq.apps.products.models import Product
def supply_point_ids(locations):
keys = [[loc.domain, loc._id] for loc in locations]
rows = SupplyPointCase.get_db().view(
'commtrack/supply_point_by_loc',
keys=keys,
include_docs=False,
)
return [row['id'] for row in rows]
def get_relevant_supply_point_ids(domain, active_location=None):
if active_location:
return supply_point_ids([active_location] + active_location.descendants)
else:
return supply_point_ids(all_locations(domain))
def product_ids_filtered_by_program(domain, program):
products = Product.by_program_id(domain, program, False)
return [p['_id'] for p in products]
## Instruction:
Switch supply point id list lookup to SQL
## Code After:
from corehq.apps.locations.models import SQLLocation
from corehq.apps.commtrack.models import SupplyPointCase
from corehq.apps.products.models import Product
def supply_point_ids(locations):
keys = [[loc.domain, loc._id] for loc in locations]
rows = SupplyPointCase.get_db().view(
'commtrack/supply_point_by_loc',
keys=keys,
include_docs=False,
)
return [row['id'] for row in rows]
def get_relevant_supply_point_ids(domain, active_location=None):
"""
Return a list of supply point ids for the selected location
and all of its descendants OR all supply point ids in the domain.
"""
def filter_relevant(queryset):
return queryset.filter(
supply_point_id__isnull=False
).values_list(
'supply_point_id',
flat=True
)
if active_location:
sql_location = active_location.sql_location
supply_point_ids = []
if sql_location.supply_point_id:
supply_point_ids.append(sql_location.supply_point_id)
supply_point_ids += list(
filter_relevant(sql_location.get_descendants())
)
return supply_point_ids
else:
return filter_relevant(SQLLocation.objects.filter(domain=domain))
def product_ids_filtered_by_program(domain, program):
products = Product.by_program_id(domain, program, False)
return [p['_id'] for p in products]
|
// ... existing code ...
from corehq.apps.locations.models import SQLLocation
from corehq.apps.commtrack.models import SupplyPointCase
// ... modified code ...
def get_relevant_supply_point_ids(domain, active_location=None):
"""
Return a list of supply point ids for the selected location
and all of its descendants OR all supply point ids in the domain.
"""
def filter_relevant(queryset):
return queryset.filter(
supply_point_id__isnull=False
).values_list(
'supply_point_id',
flat=True
)
if active_location:
sql_location = active_location.sql_location
supply_point_ids = []
if sql_location.supply_point_id:
supply_point_ids.append(sql_location.supply_point_id)
supply_point_ids += list(
filter_relevant(sql_location.get_descendants())
)
return supply_point_ids
else:
return filter_relevant(SQLLocation.objects.filter(domain=domain))
// ... rest of the code ...
|
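The SQL-side logic above reduces to: take the selected location's own supply_point_id if set, plus every non-null supply_point_id among its descendants. A pure-Python analogue with stand-in objects (Location here is a placeholder, not the real SQLLocation model):

class Location:
    def __init__(self, supply_point_id, descendants=()):
        self.supply_point_id = supply_point_id
        self.descendants = list(descendants)

def relevant_ids(loc):
    # own id first, if present
    ids = [loc.supply_point_id] if loc.supply_point_id else []
    # then every descendant with a non-null id
    ids += [d.supply_point_id for d in loc.descendants if d.supply_point_id]
    return ids

root = Location("sp-1", [Location("sp-2"), Location(None), Location("sp-3")])
print(relevant_ids(root))   # ['sp-1', 'sp-2', 'sp-3']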
0baa14975ae1b0729021ecf4d0d88acadb866414
|
pfamserver/api.py
|
pfamserver/api.py
|
from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
from distutils.sysconfig import get_python_lib
from autoupdate import lib_path, db_path
api = Api(app)
fetch = '{:s}/hmmer/easel/miniapps/esl-afetch'.format(lib_path)
def db(query):
cmd = [fetch, db_path, query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
|
from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
from distutils.sysconfig import get_python_lib
from autoupdate import lib_path, db_path
api = Api(app)
fetch = '{:s}/hmmer/easel/miniapps/esl-afetch'.format(lib_path)
def db(query):
cmd = [fetch, db_path, query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.upper(), query.capitalize(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
|
Improve the query execution order.
|
Improve the query execution order.
|
Python
|
agpl-3.0
|
ecolell/pfamserver,ecolell/pfamserver,ecolell/pfamserver
|
from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
from distutils.sysconfig import get_python_lib
from autoupdate import lib_path, db_path
api = Api(app)
fetch = '{:s}/hmmer/easel/miniapps/esl-afetch'.format(lib_path)
def db(query):
cmd = [fetch, db_path, query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
- queries = [query, query.capitalize(), query.upper(), query.lower()]
+ queries = [query, query.upper(), query.capitalize(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
|
Improve the query execution order.
|
## Code Before:
from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
from distutils.sysconfig import get_python_lib
from autoupdate import lib_path, db_path
api = Api(app)
fetch = '{:s}/hmmer/easel/miniapps/esl-afetch'.format(lib_path)
def db(query):
cmd = [fetch, db_path, query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.capitalize(), query.upper(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
## Instruction:
Improve the query execution order.
## Code After:
from application import app
from flask.ext.restful import Api, Resource
import os
from subprocess import Popen as run, PIPE
from distutils.sysconfig import get_python_lib
from autoupdate import lib_path, db_path
api = Api(app)
fetch = '{:s}/hmmer/easel/miniapps/esl-afetch'.format(lib_path)
def db(query):
cmd = [fetch, db_path, query]
return run(cmd, stdout=PIPE).communicate()[0]
class QueryAPI(Resource):
def get(self, query):
queries = [query, query.upper(), query.capitalize(), query.lower()]
for q in queries:
output = db(q)
if output:
return {'query': q, 'output': output}
return {'query': query, 'output': output}
api.add_resource(QueryAPI, '/api/query/<string:query>', endpoint = 'query')
|
// ... existing code ...
def get(self, query):
queries = [query, query.upper(), query.capitalize(), query.lower()]
// ... rest of the code ...
|
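The reordering above just moves the upper-case variant ahead of the capitalized one, presumably because all-caps matches are the most common form for Pfam identifiers. A quick demonstration of the resulting candidate list; note that duplicates are possible when the query is already lower-case, so a dict.fromkeys de-duplication (order-preserving on Python 3.7+) would be a natural refinement:

query = "piwi"
queries = [query, query.upper(), query.capitalize(), query.lower()]
print(queries)                        # ['piwi', 'PIWI', 'Piwi', 'piwi']
print(list(dict.fromkeys(queries)))   # ['piwi', 'PIWI', 'Piwi']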
2b1cc5b2426994953e8f8b937364d91f4e7aadf2
|
MyHub/MyHub/urls.py
|
MyHub/MyHub/urls.py
|
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Change default URL to display home content. Temporary fix.
|
Change default URL to display home content. Temporary fix.
|
Python
|
mit
|
sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com
|
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
- url(r'^$', loader_page, name='loader'),
+ url(r'^$', home_page, name='loader'),
- url(r'^home/$', home_page, name='index'),
+ # url(r'^home/$', home_page, name='index'),
- url(r'^resume/$', resume_page, name='resume'),
+ # url(r'^resume/$', resume_page, name='resume'),
- url(r'^projects/$', projects_page, name='projects'),
+ # url(r'^projects/$', projects_page, name='projects'),
- url(r'^contact/$', contact_page, name='contact'),
+ # url(r'^contact/$', contact_page, name='contact'),
- url(r'^admin/', include(admin.site.urls)),
+ # url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Change default URL to display home content. Temporary fix.
|
## Code Before:
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
## Instruction:
Change default URL to display home content. Temporary fix.
## Code After:
from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
# ... existing code ...
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# ... rest of the code ...
|
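Commenting the routes out is the quick fix recorded above; an alternative of the same Django vintage would keep the named paths and redirect them to the root instead. A hedged sketch (RedirectView has shipped with Django since 1.3; the list shape may need adapting to the patterns() style used above):

from django.conf.urls import url
from django.views.generic import RedirectView

# Hypothetical replacement for one of the commented-out routes.
redirect_patterns = [
    url(r'^home/$', RedirectView.as_view(url='/', permanent=False)),
]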
057cdbdb0cd3edb18201ca090f57908681512c76
|
openupgradelib/__init__.py
|
openupgradelib/__init__.py
|
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = '[email protected]'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
|
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = '[email protected]'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
|
Fix issue when running setup.py on python<3.8
|
Fix issue when running setup.py on python<3.8
|
Python
|
agpl-3.0
|
OCA/openupgradelib
|
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = '[email protected]'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
+ try:
- if sys.version_info >= (3, 8):
+ if sys.version_info >= (3, 8):
- from importlib.metadata import version, PackageNotFoundError
+ from importlib.metadata import version, PackageNotFoundError
+ else:
+ from importlib_metadata import version, PackageNotFoundError
+ except ImportError:
+ # this happens when setup.py imports openupgradelib
+ pass
else:
- from importlib_metadata import version, PackageNotFoundError
+ try:
+ __version__ = version("openupgradelib")
+ except PackageNotFoundError:
+ # package is not installed
+ pass
- try:
- __version__ = version("openupgradelib")
- except PackageNotFoundError:
- # package is not installed
- pass
-
|
Fix issue when running setup.py on python<3.8
|
## Code Before:
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = '[email protected]'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
## Instruction:
Fix issue when running setup.py on python<3.8
## Code After:
import sys
__author__ = 'Odoo Community Association (OCA)'
__email__ = '[email protected]'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
|
...
try:
if sys.version_info >= (3, 8):
from importlib.metadata import version, PackageNotFoundError
else:
from importlib_metadata import version, PackageNotFoundError
except ImportError:
# this happens when setup.py imports openupgradelib
pass
else:
try:
__version__ = version("openupgradelib")
except PackageNotFoundError:
# package is not installed
pass
...
|
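The shape of the guard above generalizes to any package that wants a best-effort __version__ without breaking setup.py imports. A standalone version of the same pattern; "pip" is probed here only because the demo needs some installed distribution to look up:

import sys

try:
    if sys.version_info >= (3, 8):
        from importlib.metadata import version, PackageNotFoundError
    else:
        from importlib_metadata import version, PackageNotFoundError
except ImportError:
    # the setup.py import path on old Pythons without the backport
    resolved = "unknown"
else:
    try:
        resolved = version("pip")   # any installed distribution works here
    except PackageNotFoundError:
        resolved = "unknown"
print(resolved)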
093702a38645853d560606446da0b078ba12d14e
|
eventkit_cloud/auth/admin.py
|
eventkit_cloud/auth/admin.py
|
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
Add OAuth class information to the UserAdmin page.
|
Add OAuth class information to the UserAdmin page.
|
Python
|
bsd-3-clause
|
terranodo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,venicegeo/eventkit-cloud,terranodo/eventkit-cloud,venicegeo/eventkit-cloud
|
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
+ class OAuthInline(admin.StackedInline):
+ model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
-
- UserAdmin.inlines = [UserLicenseInline]
+ UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
Add OAuth class information to the UserAdmin page.
|
## Code Before:
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
## Instruction:
Add OAuth class information to the UserAdmin page.
## Code After:
import logging
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.jobs.models import UserLicense
logger = logging.getLogger(__name__)
class OAuthAdmin(admin.ModelAdmin):
search_fields = ['user__username', 'identification', 'commonname', 'user_info']
list_display = ['user', 'identification', 'commonname']
def has_delete_permission(self, request, obj=None):
return False
def get_actions(self, request):
actions = super(OAuthAdmin, self).get_actions(request)
actions.pop('delete_selected', None)
return actions
class OAuthInline(admin.StackedInline):
model = OAuth
class UserLicenseInline(admin.TabularInline):
model = UserLicense
extra = 0
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
admin.site.unregister(Token)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(OAuth, OAuthAdmin)
|
// ... existing code ...
class OAuthInline(admin.StackedInline):
model = OAuth
// ... modified code ...
UserAdmin.inlines = [OAuthInline, UserLicenseInline]
UserAdmin.readonly_fields += 'last_login', 'date_joined'
// ... rest of the code ...
|
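Two details of the change above are easy to miss. First, the inline choice is deliberate: StackedInline renders one full form per OAuth row, which suits the wide user_info field, while the existing TabularInline keeps the short UserLicense rows compact. Second, the line `UserAdmin.readonly_fields += 'last_login', 'date_joined'` works because the right-hand side is a tuple literal, so this is ordinary tuple concatenation:

readonly_fields = ('password',)
readonly_fields += 'last_login', 'date_joined'
print(readonly_fields)   # ('password', 'last_login', 'date_joined')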
635a51c1ee8f5608de03351008f0d5aa9a116660
|
opps/images/templatetags/images_tags.py
|
opps/images/templatetags/images_tags.py
|
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
Fix has no attribute on templatetags image_obj
|
Fix has no attribute on templatetags image_obj
|
Python
|
mit
|
jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps
|
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
+ if getattr(image, 'flip'):
- new['flip'] = image.flip
+ new['flip'] = image.flip
+ if getattr(image, 'flop'):
- new['flop'] = image.flop
+ new['flop'] = image.flop
- if image.halign:
+ if getattr(image, 'halign'):
new['halign'] = image.halign
- if image.valign:
+ if getattr(image, 'valign'):
new['valign'] = image.valign
+ if getattr(image, 'fit_in'):
- new['fit_in'] = image.fit_in
+ new['fit_in'] = image.fit_in
+ if getattr(image, 'smart'):
- new['smart'] = image.smart
+ new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
Fix has no attribute on templatetags image_obj
|
## Code Before:
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
new['flip'] = image.flip
new['flop'] = image.flop
if image.halign:
new['halign'] = image.halign
if image.valign:
new['valign'] = image.valign
new['fit_in'] = image.fit_in
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
## Instruction:
Fix has no attribute on templatetags image_obj
## Code After:
from django import template
from ..generate import image_url as url
register = template.Library()
@register.simple_tag
def image_url(image_url, **kwargs):
return url(image_url=image_url, **kwargs)
@register.simple_tag
def image_obj(image, **kwargs):
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
kwargs = dict(new, **kwargs)
return url(image_url=image.image.url, **kwargs)
|
# ... existing code ...
new = {}
if getattr(image, 'flip'):
new['flip'] = image.flip
if getattr(image, 'flop'):
new['flop'] = image.flop
if getattr(image, 'halign'):
new['halign'] = image.halign
if getattr(image, 'valign'):
new['valign'] = image.valign
if getattr(image, 'fit_in'):
new['fit_in'] = image.fit_in
if getattr(image, 'smart'):
new['smart'] = image.smart
# ... rest of the code ...
|
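A caveat on the fix above: two-argument getattr still raises AttributeError when the attribute is genuinely missing; only the three-argument form with a default suppresses the error, so the change as written helps only for attributes that exist but are falsy. A standalone demonstration:

class FakeImage:
    flip = True   # 'flop' deliberately left undefined

img = FakeImage()
print(getattr(img, 'flip'))           # True
print(getattr(img, 'flop', False))    # False -- the safe, three-argument form
try:
    getattr(img, 'flop')              # two-argument form still raises
except AttributeError as exc:
    print('still raises:', exc)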
300471024ff16026d23bf60008d19784604b2eb3
|
gala-training-crossval-sub.py
|
gala-training-crossval-sub.py
|
from gala import classify
datas = []
labels = []
import numpy as np
list(map(np.shape, labels))
for i in range(3, 4):
data, label = classify.load_training_data_from_disk('training-data-%i.h5' % i, names=['data', 'labels'])
datas.append(data)
labels.append(label[:, 0])
X0 = np.concatenate(datas, axis=0)
y0 = np.concatenate(labels)
idx = np.random.choice(len(y0), size=3000, replace=False)
X, y = X0[idx], y0[idx]
param_dist = {'n_estimators': [20, 100, 200, 500],
'max_depth': [3, 5, 20, None],
'max_features': ['auto', 5, 10, 20],
'bootstrap': [True, False],
'criterion': ['gini', 'entropy']}
from sklearn import grid_search as gs
from time import time
from sklearn import ensemble
ensemble.RandomForestClassifier().get_params().keys()
rf = ensemble.RandomForestClassifier()
random_search = gs.GridSearchCV(rf, param_grid=param_dist, refit=False,
verbose=2, n_jobs=12)
start=time(); random_search.fit(X, y); stop=time()
|
from gala import classify
datas = []
labels = []
import numpy as np
list(map(np.shape, labels))
for i in range(3, 4):
data, label = classify.load_training_data_from_disk('training-data-%i.h5' % i, names=['data', 'labels'])
datas.append(data)
labels.append(label[:, 0])
X0 = np.concatenate(datas, axis=0)
y0 = np.concatenate(labels)
# runtime was 5min for 3000 samples, expect ~2h for 72,000
idx = np.random.choice(len(y0), size=72000, replace=False)
X, y = X0[idx], y0[idx]
param_dist = {'n_estimators': [20, 100, 200, 500],
'max_depth': [3, 5, 20, None],
'max_features': ['auto', 5, 10, 20],
'bootstrap': [True, False],
'criterion': ['gini', 'entropy']}
from sklearn import grid_search as gs
from time import time
from sklearn import ensemble
ensemble.RandomForestClassifier().get_params().keys()
rf = ensemble.RandomForestClassifier()
random_search = gs.GridSearchCV(rf, param_grid=param_dist, refit=False,
verbose=2, n_jobs=12)
start=time(); random_search.fit(X, y); stop=time()
|
Add run for 72k samples
|
Add run for 72k samples
|
Python
|
bsd-3-clause
|
jni/gala-scripts
|
from gala import classify
datas = []
labels = []
import numpy as np
list(map(np.shape, labels))
for i in range(3, 4):
data, label = classify.load_training_data_from_disk('training-data-%i.h5' % i, names=['data', 'labels'])
datas.append(data)
labels.append(label[:, 0])
X0 = np.concatenate(datas, axis=0)
y0 = np.concatenate(labels)
+ # runtime was 5min for 3000 samples, expect ~2h for 72,000
- idx = np.random.choice(len(y0), size=3000, replace=False)
+ idx = np.random.choice(len(y0), size=72000, replace=False)
X, y = X0[idx], y0[idx]
param_dist = {'n_estimators': [20, 100, 200, 500],
'max_depth': [3, 5, 20, None],
'max_features': ['auto', 5, 10, 20],
'bootstrap': [True, False],
'criterion': ['gini', 'entropy']}
from sklearn import grid_search as gs
from time import time
from sklearn import ensemble
ensemble.RandomForestClassifier().get_params().keys()
rf = ensemble.RandomForestClassifier()
random_search = gs.GridSearchCV(rf, param_grid=param_dist, refit=False,
verbose=2, n_jobs=12)
start=time(); random_search.fit(X, y); stop=time()
|
Add run for 72k samples
|
## Code Before:
from gala import classify
datas = []
labels = []
import numpy as np
list(map(np.shape, labels))
for i in range(3, 4):
data, label = classify.load_training_data_from_disk('training-data-%i.h5' % i, names=['data', 'labels'])
datas.append(data)
labels.append(label[:, 0])
X0 = np.concatenate(datas, axis=0)
y0 = np.concatenate(labels)
idx = np.random.choice(len(y0), size=3000, replace=False)
X, y = X0[idx], y0[idx]
param_dist = {'n_estimators': [20, 100, 200, 500],
'max_depth': [3, 5, 20, None],
'max_features': ['auto', 5, 10, 20],
'bootstrap': [True, False],
'criterion': ['gini', 'entropy']}
from sklearn import grid_search as gs
from time import time
from sklearn import ensemble
ensemble.RandomForestClassifier().get_params().keys()
rf = ensemble.RandomForestClassifier()
random_search = gs.GridSearchCV(rf, param_grid=param_dist, refit=False,
verbose=2, n_jobs=12)
start=time(); random_search.fit(X, y); stop=time()
## Instruction:
Add run for 72k samples
## Code After:
from gala import classify
datas = []
labels = []
import numpy as np
list(map(np.shape, labels))
for i in range(3, 4):
data, label = classify.load_training_data_from_disk('training-data-%i.h5' % i, names=['data', 'labels'])
datas.append(data)
labels.append(label[:, 0])
X0 = np.concatenate(datas, axis=0)
y0 = np.concatenate(labels)
# runtime was 5min for 3000 samples, expect ~2h for 72,000
idx = np.random.choice(len(y0), size=72000, replace=False)
X, y = X0[idx], y0[idx]
param_dist = {'n_estimators': [20, 100, 200, 500],
'max_depth': [3, 5, 20, None],
'max_features': ['auto', 5, 10, 20],
'bootstrap': [True, False],
'criterion': ['gini', 'entropy']}
from sklearn import grid_search as gs
from time import time
from sklearn import ensemble
ensemble.RandomForestClassifier().get_params().keys()
rf = ensemble.RandomForestClassifier()
random_search = gs.GridSearchCV(rf, param_grid=param_dist, refit=False,
verbose=2, n_jobs=12)
start=time(); random_search.fit(X, y); stop=time()
|
// ... existing code ...
y0 = np.concatenate(labels)
# runtime was 5min for 3000 samples, expect ~2h for 72,000
idx = np.random.choice(len(y0), size=72000, replace=False)
X, y = X0[idx], y0[idx]
// ... rest of the code ...
|
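The runtime comment in the change above is a linear extrapolation, and the grid itself is sizeable: 4 x 4 x 4 x 2 x 2 = 256 parameter combinations, times the 3 cross-validation folds GridSearchCV used by default in that sklearn era, is 768 forest fits. The arithmetic:

combos = 4 * 4 * 4 * 2 * 2                           # 256 parameter settings
fits = combos * 3                                    # default 3-fold CV in older sklearn
print(fits)                                          # 768
minutes_at_3k = 5
print(minutes_at_3k * 72000 / 3000 / 60, "hours")    # 2.0 hours, matching the comment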
16c9563a75792aba7ccc0d979f579d64dc0140c1
|
common_rg_bar.py
|
common_rg_bar.py
|
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
def main():
if len(sys.argv) >= 2:
code = sys.argv[1]
if code == 'x':
col_char = '3'
cols_limit = 78
code = '' # No code provided - only yellow bar
else:
if code == 'y':
col_char = '3'
else:
value = int(code)
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2 - len(code)),
code,
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
|
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
3. (optional) Text to display (without color)
shows red/green bar to visualize return code of previous command
'''
import sys
def main():
if len(sys.argv) >= 2:
code = sys.argv[1]
if code == 'x':
col_char = '3'
cols_limit = 78
code = '' # No code provided - only yellow bar
else:
if code == 'y':
col_char = '3'
else:
value = int(code)
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
if len(sys.argv) >= 4:
start_text = sys.argv[3] + ' '
else:
start_text = ''
esc = chr(27)
print (''.join((
start_text,
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2 - len(code) - len(start_text)),
code,
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
3. (optional) Text to display
''' % dict(
prog_name=sys.argv[0],
))
|
Add optional text to display
|
Add optional text to display
|
Python
|
mit
|
kwadrat/rgb_tdd
|
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
+ 3. (optional) Text to display (without color)
shows red/green bar to visualize return code of previous command
'''
import sys
def main():
if len(sys.argv) >= 2:
code = sys.argv[1]
if code == 'x':
col_char = '3'
cols_limit = 78
code = '' # No code provided - only yellow bar
else:
if code == 'y':
col_char = '3'
else:
value = int(code)
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
+ if len(sys.argv) >= 4:
+ start_text = sys.argv[3] + ' '
+ else:
+ start_text = ''
esc = chr(27)
print (''.join((
+ start_text,
esc,
'[4',
col_char,
'm',
- ' ' * (cols_limit - 2 - len(code)),
+ ' ' * (cols_limit - 2 - len(code) - len(start_text)),
code,
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
+ 3. (optional) Text to display
''' % dict(
prog_name=sys.argv[0],
))
|
Add optional text to display
|
## Code Before:
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
shows red/green bar to visualize return code of previous command
'''
import sys
def main():
if len(sys.argv) >= 2:
code = sys.argv[1]
if code == 'x':
col_char = '3'
cols_limit = 78
code = '' # No code provided - only yellow bar
else:
if code == 'y':
col_char = '3'
else:
value = int(code)
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
esc = chr(27)
print (''.join((
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2 - len(code)),
code,
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
''' % dict(
prog_name=sys.argv[0],
))
## Instruction:
Add optional text to display
## Code After:
'''
Given:
1. status code: (0 - OK, other value - BAD)
2. terminal window width
3. (optional) Text to display (without color)
shows red/green bar to visualize return code of previous command
'''
import sys
def main():
if len(sys.argv) >= 2:
code = sys.argv[1]
if code == 'x':
col_char = '3'
cols_limit = 78
code = '' # No code provided - only yellow bar
else:
if code == 'y':
col_char = '3'
else:
value = int(code)
if value:
col_char = '1'
else:
col_char = '2'
cols_limit = int(sys.argv[2])
if len(sys.argv) >= 4:
start_text = sys.argv[3] + ' '
else:
start_text = ''
esc = chr(27)
print (''.join((
start_text,
esc,
'[4',
col_char,
'm',
' ' * (cols_limit - 2 - len(code) - len(start_text)),
code,
esc,
'[0m',
)))
else:
print ('''
Usage: %(prog_name)s status_code number_of_columns
1. status code: 0 - OK (green color), other values - BAD (red color)
2. number of columns: the width of text console
3. (optional) Text to display
''' % dict(
prog_name=sys.argv[0],
))
|
# ... existing code ...
2. terminal window width
3. (optional) Text to display (without color)
# ... modified code ...
cols_limit = int(sys.argv[2])
if len(sys.argv) >= 4:
start_text = sys.argv[3] + ' '
else:
start_text = ''
esc = chr(27)
...
print (''.join((
start_text,
esc,
...
'm',
' ' * (cols_limit - 2 - len(code) - len(start_text)),
code,
...
2. number of columns: the width of text console
3. (optional) Text to display
''' % dict(
# ... rest of the code ...
|
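The escape sequence assembled above is standard ANSI: ESC [ 4<n> m sets the background color (41 red, 42 green, 43 yellow) and ESC [ 0 m resets it. A standalone demonstration for any ANSI-capable terminal:

esc = chr(27)
for col_char, label in (('1', 'BAD'), ('2', 'OK'), ('3', 'WARN')):
    # background color on, padded label, reset
    print(esc + '[4' + col_char + 'm' + label.ljust(10) + esc + '[0m')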