commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
e0d2ce09475e3ae07e2740cbf0e342f68c1564a8
|
gn/standalone/toolchain/linux_find_llvm.py
|
gn/standalone/toolchain/linux_find_llvm.py
|
import os
import subprocess
import sys
def main():
devnull = open(os.devnull, 'w')
for clang in ('clang', 'clang-3.8', 'clang-3.5'):
if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
continue
res = subprocess.check_output([clang, '-print-search-dirs'])
for line in res.splitlines():
if not line.startswith('libraries:'):
continue
libs = line.split('=', 1)[1].split(':')
for lib in libs:
if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
continue
print os.path.abspath(lib)
print clang
print clang.replace('clang', 'clang++')
return 0
print 'Could not find the LLVM lib dir'
return 1
if __name__ == '__main__':
sys.exit(main())
|
import os
import subprocess
import sys
def main():
devnull = open(os.devnull, 'w')
for clang in ('clang', 'clang-3.8', 'clang-3.5'):
if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
continue
res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
for line in res.splitlines():
if not line.startswith('libraries:'):
continue
libs = line.split('=', 1)[1].split(':')
for lib in libs:
if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
continue
print(os.path.abspath(lib))
print(clang)
print(clang.replace('clang', 'clang++'))
return 0
print('Could not find the LLVM lib dir')
return 1
if __name__ == '__main__':
sys.exit(main())
|
Fix issue with finding llvm when using python3
|
gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <[email protected]>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108
|
Python
|
apache-2.0
|
google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto
|
python
|
## Code Before:
import os
import subprocess
import sys
def main():
devnull = open(os.devnull, 'w')
for clang in ('clang', 'clang-3.8', 'clang-3.5'):
if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
continue
res = subprocess.check_output([clang, '-print-search-dirs'])
for line in res.splitlines():
if not line.startswith('libraries:'):
continue
libs = line.split('=', 1)[1].split(':')
for lib in libs:
if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
continue
print os.path.abspath(lib)
print clang
print clang.replace('clang', 'clang++')
return 0
print 'Could not find the LLVM lib dir'
return 1
if __name__ == '__main__':
sys.exit(main())
## Instruction:
gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <[email protected]>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108
## Code After:
import os
import subprocess
import sys
def main():
devnull = open(os.devnull, 'w')
for clang in ('clang', 'clang-3.8', 'clang-3.5'):
if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
continue
res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
for line in res.splitlines():
if not line.startswith('libraries:'):
continue
libs = line.split('=', 1)[1].split(':')
for lib in libs:
if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
continue
print(os.path.abspath(lib))
print(clang)
print(clang.replace('clang', 'clang++'))
return 0
print('Could not find the LLVM lib dir')
return 1
if __name__ == '__main__':
sys.exit(main())
|
...
for clang in ('clang', 'clang-3.8', 'clang-3.5'):
if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
continue
res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
for line in res.splitlines():
if not line.startswith('libraries:'):
continue
...
for lib in libs:
if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
continue
print(os.path.abspath(lib))
print(clang)
print(clang.replace('clang', 'clang++'))
return 0
print('Could not find the LLVM lib dir')
return 1
...
|
e4f8f684083d69ad6346328e6d020da139a07218
|
api/src/main/java/org/runestar/client/api/LafInstallation.kt
|
api/src/main/java/org/runestar/client/api/LafInstallation.kt
|
package org.runestar.client.api
import com.alee.laf.WebLookAndFeel
import com.alee.skin.dark.DarkSkin
import org.runestar.general.fonts.RUNESCAPE_CHAT_BOLD_FONT
import org.runestar.general.fonts.RUNESCAPE_CHAT_FONT
import javax.swing.JPopupMenu
import javax.swing.UIManager
import javax.swing.plaf.FontUIResource
internal object LafInstallation : Runnable {
override fun run() {
val chatFont = FontUIResource(RUNESCAPE_CHAT_FONT)
val boldFont = FontUIResource(RUNESCAPE_CHAT_BOLD_FONT)
UIManager.put("Label.font", chatFont)
UIManager.put("MenuItem.font", boldFont)
WebLookAndFeel.install(DarkSkin::class.java)
JPopupMenu.setDefaultLightWeightPopupEnabled(false)
}
}
|
package org.runestar.client.api
import com.alee.laf.WebLookAndFeel
import com.alee.skin.dark.DarkSkin
import org.runestar.general.fonts.RUNESCAPE_CHAT_FONT
import javax.swing.JPopupMenu
import javax.swing.UIManager
import javax.swing.plaf.FontUIResource
internal object LafInstallation : Runnable {
override fun run() {
val chatFont = FontUIResource(RUNESCAPE_CHAT_FONT)
UIManager.put("Label.font", chatFont)
UIManager.put("Button.font", chatFont)
UIManager.put("ToggleButton.font", chatFont)
UIManager.put("Tree.font", chatFont)
UIManager.put("ComboBox.font", chatFont)
UIManager.put("MenuItem.font", chatFont)
WebLookAndFeel.install(DarkSkin::class.java)
JPopupMenu.setDefaultLightWeightPopupEnabled(false)
}
}
|
Change more default component fonts
|
Change more default component fonts
|
Kotlin
|
mit
|
RuneSuite/client,RuneSuite/client
|
kotlin
|
## Code Before:
package org.runestar.client.api
import com.alee.laf.WebLookAndFeel
import com.alee.skin.dark.DarkSkin
import org.runestar.general.fonts.RUNESCAPE_CHAT_BOLD_FONT
import org.runestar.general.fonts.RUNESCAPE_CHAT_FONT
import javax.swing.JPopupMenu
import javax.swing.UIManager
import javax.swing.plaf.FontUIResource
internal object LafInstallation : Runnable {
override fun run() {
val chatFont = FontUIResource(RUNESCAPE_CHAT_FONT)
val boldFont = FontUIResource(RUNESCAPE_CHAT_BOLD_FONT)
UIManager.put("Label.font", chatFont)
UIManager.put("MenuItem.font", boldFont)
WebLookAndFeel.install(DarkSkin::class.java)
JPopupMenu.setDefaultLightWeightPopupEnabled(false)
}
}
## Instruction:
Change more default component fonts
## Code After:
package org.runestar.client.api
import com.alee.laf.WebLookAndFeel
import com.alee.skin.dark.DarkSkin
import org.runestar.general.fonts.RUNESCAPE_CHAT_FONT
import javax.swing.JPopupMenu
import javax.swing.UIManager
import javax.swing.plaf.FontUIResource
internal object LafInstallation : Runnable {
override fun run() {
val chatFont = FontUIResource(RUNESCAPE_CHAT_FONT)
UIManager.put("Label.font", chatFont)
UIManager.put("Button.font", chatFont)
UIManager.put("ToggleButton.font", chatFont)
UIManager.put("Tree.font", chatFont)
UIManager.put("ComboBox.font", chatFont)
UIManager.put("MenuItem.font", chatFont)
WebLookAndFeel.install(DarkSkin::class.java)
JPopupMenu.setDefaultLightWeightPopupEnabled(false)
}
}
|
...
import com.alee.laf.WebLookAndFeel
import com.alee.skin.dark.DarkSkin
import org.runestar.general.fonts.RUNESCAPE_CHAT_FONT
import javax.swing.JPopupMenu
import javax.swing.UIManager
...
override fun run() {
val chatFont = FontUIResource(RUNESCAPE_CHAT_FONT)
UIManager.put("Label.font", chatFont)
UIManager.put("Button.font", chatFont)
UIManager.put("ToggleButton.font", chatFont)
UIManager.put("Tree.font", chatFont)
UIManager.put("ComboBox.font", chatFont)
UIManager.put("MenuItem.font", chatFont)
WebLookAndFeel.install(DarkSkin::class.java)
...
|
25349f02a3d5cdd7262481b43ec4cac4047fccd7
|
src/config/ServerConfig.java
|
src/config/ServerConfig.java
|
package config;
import heufybot.core.Logger;
import heufybot.utils.FileUtils;
import java.util.HashMap;
import org.yaml.snakeyaml.Yaml;
public class ServerConfig extends GlobalConfig
{
@SuppressWarnings("unchecked")
public void loadServerConfig(String fileName, HashMap<String, Object> globalSettings)
{
this.settings = globalSettings;
if(fileName == null)
{
Logger.error("Config", "No seperate server configs found. Using the settings from the global config instead.");
return;
}
Yaml yaml = new Yaml();
String settingsYaml = FileUtils.readFile(fileName);
HashMap<String, Object> serverSettings = (HashMap<String, Object>) yaml.load(settingsYaml);
for(String setting : serverSettings.keySet())
{
settings.put(setting, serverSettings.get(setting));
}
}
}
|
package config;
import heufybot.core.Logger;
import heufybot.utils.FileUtils;
import java.util.HashMap;
import org.yaml.snakeyaml.Yaml;
public class ServerConfig extends GlobalConfig
{
@SuppressWarnings("unchecked")
public void loadServerConfig(String fileName, HashMap<String, Object> globalSettings)
{
this.settings = (HashMap<String, Object>) globalSettings.clone();
if(fileName == null)
{
Logger.error("Config", "No seperate server configs found. Using the settings from the global config instead.");
return;
}
Yaml yaml = new Yaml();
String settingsYaml = FileUtils.readFile(fileName);
HashMap<String, Object> serverSettings = (HashMap<String, Object>) yaml.load(settingsYaml);
for(String setting : serverSettings.keySet())
{
settings.put(setting, serverSettings.get(setting));
}
}
}
|
Stop the server configs from overwriting the global one
|
Stop the server configs from overwriting the global one
|
Java
|
mit
|
Heufneutje/RE_HeufyBot,Heufneutje/RE_HeufyBot
|
java
|
## Code Before:
package config;
import heufybot.core.Logger;
import heufybot.utils.FileUtils;
import java.util.HashMap;
import org.yaml.snakeyaml.Yaml;
public class ServerConfig extends GlobalConfig
{
@SuppressWarnings("unchecked")
public void loadServerConfig(String fileName, HashMap<String, Object> globalSettings)
{
this.settings = globalSettings;
if(fileName == null)
{
Logger.error("Config", "No seperate server configs found. Using the settings from the global config instead.");
return;
}
Yaml yaml = new Yaml();
String settingsYaml = FileUtils.readFile(fileName);
HashMap<String, Object> serverSettings = (HashMap<String, Object>) yaml.load(settingsYaml);
for(String setting : serverSettings.keySet())
{
settings.put(setting, serverSettings.get(setting));
}
}
}
## Instruction:
Stop the server configs from overwriting the global one
## Code After:
package config;
import heufybot.core.Logger;
import heufybot.utils.FileUtils;
import java.util.HashMap;
import org.yaml.snakeyaml.Yaml;
public class ServerConfig extends GlobalConfig
{
@SuppressWarnings("unchecked")
public void loadServerConfig(String fileName, HashMap<String, Object> globalSettings)
{
this.settings = (HashMap<String, Object>) globalSettings.clone();
if(fileName == null)
{
Logger.error("Config", "No seperate server configs found. Using the settings from the global config instead.");
return;
}
Yaml yaml = new Yaml();
String settingsYaml = FileUtils.readFile(fileName);
HashMap<String, Object> serverSettings = (HashMap<String, Object>) yaml.load(settingsYaml);
for(String setting : serverSettings.keySet())
{
settings.put(setting, serverSettings.get(setting));
}
}
}
|
# ... existing code ...
@SuppressWarnings("unchecked")
public void loadServerConfig(String fileName, HashMap<String, Object> globalSettings)
{
this.settings = (HashMap<String, Object>) globalSettings.clone();
if(fileName == null)
{
Logger.error("Config", "No seperate server configs found. Using the settings from the global config instead.");
# ... rest of the code ...
|
164a80ce3bcffad0e233426830c712cddd2f750b
|
thefederation/apps.py
|
thefederation/apps.py
|
import datetime
import sys
import django_rq
from django.apps import AppConfig
class TheFederationConfig(AppConfig):
name = "thefederation"
verbose_name = "The Federation"
def ready(self):
# Only register tasks if RQ Scheduler process
if "rqscheduler" not in sys.argv:
return
from thefederation.social import make_daily_post
from thefederation.tasks import aggregate_daily_stats
from thefederation.tasks import clean_duplicate_nodes
from thefederation.tasks import poll_nodes
scheduler = django_rq.get_scheduler()
# Delete any existing jobs in the scheduler when the app starts up
for job in scheduler.get_jobs():
job.delete()
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=aggregate_daily_stats,
interval=5500,
queue_name='high',
)
scheduler.cron(
'0 10 * * *',
func=make_daily_post,
queue_name='high',
)
scheduler.cron(
'18 4 * * *',
func=clean_duplicate_nodes,
queue_name='medium',
)
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=poll_nodes,
interval=10800,
queue_name='medium',
)
|
import datetime
import sys
import django_rq
from django.apps import AppConfig
class TheFederationConfig(AppConfig):
name = "thefederation"
verbose_name = "The Federation"
def ready(self):
# Only register tasks if RQ Scheduler process
if "rqscheduler" not in sys.argv:
return
from thefederation.social import make_daily_post
from thefederation.tasks import aggregate_daily_stats
from thefederation.tasks import clean_duplicate_nodes
from thefederation.tasks import poll_nodes
scheduler = django_rq.get_scheduler()
# Delete any existing jobs in the scheduler when the app starts up
for job in scheduler.get_jobs():
job.delete()
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=aggregate_daily_stats,
interval=5500,
queue_name='high',
)
scheduler.cron(
'0 10 * * *',
func=make_daily_post,
queue_name='high',
)
scheduler.cron(
'18 4 * * *',
func=clean_duplicate_nodes,
queue_name='medium',
timeout=3600,
)
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=poll_nodes,
interval=10800,
queue_name='medium',
)
|
Increase timeout of clean_duplicate_nodes job
|
Increase timeout of clean_duplicate_nodes job
|
Python
|
agpl-3.0
|
jaywink/diaspora-hub,jaywink/diaspora-hub,jaywink/the-federation.info,jaywink/diaspora-hub,jaywink/the-federation.info,jaywink/the-federation.info
|
python
|
## Code Before:
import datetime
import sys
import django_rq
from django.apps import AppConfig
class TheFederationConfig(AppConfig):
name = "thefederation"
verbose_name = "The Federation"
def ready(self):
# Only register tasks if RQ Scheduler process
if "rqscheduler" not in sys.argv:
return
from thefederation.social import make_daily_post
from thefederation.tasks import aggregate_daily_stats
from thefederation.tasks import clean_duplicate_nodes
from thefederation.tasks import poll_nodes
scheduler = django_rq.get_scheduler()
# Delete any existing jobs in the scheduler when the app starts up
for job in scheduler.get_jobs():
job.delete()
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=aggregate_daily_stats,
interval=5500,
queue_name='high',
)
scheduler.cron(
'0 10 * * *',
func=make_daily_post,
queue_name='high',
)
scheduler.cron(
'18 4 * * *',
func=clean_duplicate_nodes,
queue_name='medium',
)
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=poll_nodes,
interval=10800,
queue_name='medium',
)
## Instruction:
Increase timeout of clean_duplicate_nodes job
## Code After:
import datetime
import sys
import django_rq
from django.apps import AppConfig
class TheFederationConfig(AppConfig):
name = "thefederation"
verbose_name = "The Federation"
def ready(self):
# Only register tasks if RQ Scheduler process
if "rqscheduler" not in sys.argv:
return
from thefederation.social import make_daily_post
from thefederation.tasks import aggregate_daily_stats
from thefederation.tasks import clean_duplicate_nodes
from thefederation.tasks import poll_nodes
scheduler = django_rq.get_scheduler()
# Delete any existing jobs in the scheduler when the app starts up
for job in scheduler.get_jobs():
job.delete()
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=aggregate_daily_stats,
interval=5500,
queue_name='high',
)
scheduler.cron(
'0 10 * * *',
func=make_daily_post,
queue_name='high',
)
scheduler.cron(
'18 4 * * *',
func=clean_duplicate_nodes,
queue_name='medium',
timeout=3600,
)
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
func=poll_nodes,
interval=10800,
queue_name='medium',
)
|
// ... existing code ...
'18 4 * * *',
func=clean_duplicate_nodes,
queue_name='medium',
timeout=3600,
)
scheduler.schedule(
scheduled_time=datetime.datetime.utcnow(),
// ... rest of the code ...
|
ac378008d178b88607954b341fe8abc7ac36b45b
|
src/java/fault/metrics/Second.java
|
src/java/fault/metrics/Second.java
|
package fault.metrics;
import fault.concurrent.LongAdder;
/**
* Created by timbrooks on 6/1/15.
*/
public class Second {
private final LongAdder successes = new LongAdder();
private final LongAdder errors = new LongAdder();
private final LongAdder timeouts = new LongAdder();
private final LongAdder circuitOpen = new LongAdder();
private final LongAdder queueFull = new LongAdder();
private final LongAdder maxConcurrencyExceeded = new LongAdder();
}
|
package fault.metrics;
import fault.concurrent.LongAdder;
/**
* Created by timbrooks on 6/1/15.
*/
public class Second {
private final LongAdder successes = new LongAdder();
private final LongAdder errors = new LongAdder();
private final LongAdder timeouts = new LongAdder();
private final LongAdder circuitOpen = new LongAdder();
private final LongAdder queueFull = new LongAdder();
private final LongAdder maxConcurrencyExceeded = new LongAdder();
public void incrementMetric(Metric metric) {
switch (metric) {
case SUCCESS:
successes.increment();
break;
case ERROR:
errors.increment();
break;
case TIMEOUT:
timeouts.increment();
break;
case CIRCUIT_OPEN:
circuitOpen.increment();
break;
case QUEUE_FULL:
queueFull.increment();
break;
case MAX_CONCURRENCY_LEVEL_EXCEEDED:
maxConcurrencyExceeded.increment();
break;
default:
throw new RuntimeException("Unknown metric: " + metric);
}
}
}
|
Add method to increment metrics
|
Add method to increment metrics
|
Java
|
apache-2.0
|
tbrooks8/Beehive
|
java
|
## Code Before:
package fault.metrics;
import fault.concurrent.LongAdder;
/**
* Created by timbrooks on 6/1/15.
*/
public class Second {
private final LongAdder successes = new LongAdder();
private final LongAdder errors = new LongAdder();
private final LongAdder timeouts = new LongAdder();
private final LongAdder circuitOpen = new LongAdder();
private final LongAdder queueFull = new LongAdder();
private final LongAdder maxConcurrencyExceeded = new LongAdder();
}
## Instruction:
Add method to increment metrics
## Code After:
package fault.metrics;
import fault.concurrent.LongAdder;
/**
* Created by timbrooks on 6/1/15.
*/
public class Second {
private final LongAdder successes = new LongAdder();
private final LongAdder errors = new LongAdder();
private final LongAdder timeouts = new LongAdder();
private final LongAdder circuitOpen = new LongAdder();
private final LongAdder queueFull = new LongAdder();
private final LongAdder maxConcurrencyExceeded = new LongAdder();
public void incrementMetric(Metric metric) {
switch (metric) {
case SUCCESS:
successes.increment();
break;
case ERROR:
errors.increment();
break;
case TIMEOUT:
timeouts.increment();
break;
case CIRCUIT_OPEN:
circuitOpen.increment();
break;
case QUEUE_FULL:
queueFull.increment();
break;
case MAX_CONCURRENCY_LEVEL_EXCEEDED:
maxConcurrencyExceeded.increment();
break;
default:
throw new RuntimeException("Unknown metric: " + metric);
}
}
}
|
// ... existing code ...
private final LongAdder circuitOpen = new LongAdder();
private final LongAdder queueFull = new LongAdder();
private final LongAdder maxConcurrencyExceeded = new LongAdder();
public void incrementMetric(Metric metric) {
switch (metric) {
case SUCCESS:
successes.increment();
break;
case ERROR:
errors.increment();
break;
case TIMEOUT:
timeouts.increment();
break;
case CIRCUIT_OPEN:
circuitOpen.increment();
break;
case QUEUE_FULL:
queueFull.increment();
break;
case MAX_CONCURRENCY_LEVEL_EXCEEDED:
maxConcurrencyExceeded.increment();
break;
default:
throw new RuntimeException("Unknown metric: " + metric);
}
}
}
// ... rest of the code ...
|
5af61cae2ca438880357f88533cfa77ea161efac
|
corehq/ex-submodules/pillow_retry/admin.py
|
corehq/ex-submodules/pillow_retry/admin.py
|
from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
|
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
Add delete action to PillowRetry
|
Add delete action to PillowRetry
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
python
|
## Code Before:
from django.contrib import admin
from .models import PillowError
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
admin.site.register(PillowError, PillowErrorAdmin)
## Instruction:
Add delete action to PillowRetry
## Code After:
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
|
# ... existing code ...
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
# ... modified code ...
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
# ... rest of the code ...
|
f67704c271b8b88ba97d1b44c73552119d79b048
|
tests/test_utils.py
|
tests/test_utils.py
|
import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
|
from numpy.testing import assert_raises, assert_equal
from six.moves import range, cPickle
from fuel.iterator import DataIterator
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_picklable", "bulky_attr")
class DummyClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_picklable = lambda x: x
class FaultyClass(object):
pass
@do_not_pickle_attributes("iterator")
class UnpicklableClass(object):
def __init__(self):
self.load()
def load(self):
self.iterator = DataIterator(None)
@do_not_pickle_attributes("attribute")
class NonLoadingClass(object):
def load(self):
pass
class TestDoNotPickleAttributes(object):
def test_load(self):
instance = cPickle.loads(cPickle.dumps(DummyClass()))
assert_equal(instance.bulky_attr, list(range(100)))
assert instance.non_picklable is not None
def test_value_error_no_load_method(self):
assert_raises(ValueError, do_not_pickle_attributes("x"), FaultyClass)
def test_value_error_iterator(self):
assert_raises(ValueError, cPickle.dumps, UnpicklableClass())
def test_value_error_attribute_non_loaded(self):
assert_raises(ValueError, getattr, NonLoadingClass(), 'attribute')
|
Increase test coverage in utils.py
|
Increase test coverage in utils.py
|
Python
|
mit
|
chrishokamp/fuel,ejls/fuel,harmdevries89/fuel,glewis17/fuel,EderSantana/fuel,rodrigob/fuel,markusnagel/fuel,bouthilx/fuel,janchorowski/fuel,aalmah/fuel,vdumoulin/fuel,rodrigob/fuel,orhanf/fuel,laurent-dinh/fuel,rizar/fuel,capybaralet/fuel,aalmah/fuel,hantek/fuel,mila-udem/fuel,janchorowski/fuel,hantek/fuel,dmitriy-serdyuk/fuel,rizar/fuel,dwf/fuel,dhruvparamhans/fuel,chrishokamp/fuel,harmdevries89/fuel,dhruvparamhans/fuel,markusnagel/fuel,mjwillson/fuel,mila-udem/fuel,jbornschein/fuel,udibr/fuel,orhanf/fuel,glewis17/fuel,bouthilx/fuel,dmitriy-serdyuk/fuel,codeaudit/fuel,dwf/fuel,dribnet/fuel,mjwillson/fuel,jbornschein/fuel,ejls/fuel,laurent-dinh/fuel,vdumoulin/fuel,dribnet/fuel,lamblin/fuel,capybaralet/fuel,codeaudit/fuel,EderSantana/fuel,udibr/fuel,lamblin/fuel
|
python
|
## Code Before:
import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
## Instruction:
Increase test coverage in utils.py
## Code After:
from numpy.testing import assert_raises, assert_equal
from six.moves import range, cPickle
from fuel.iterator import DataIterator
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_picklable", "bulky_attr")
class DummyClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_picklable = lambda x: x
class FaultyClass(object):
pass
@do_not_pickle_attributes("iterator")
class UnpicklableClass(object):
def __init__(self):
self.load()
def load(self):
self.iterator = DataIterator(None)
@do_not_pickle_attributes("attribute")
class NonLoadingClass(object):
def load(self):
pass
class TestDoNotPickleAttributes(object):
def test_load(self):
instance = cPickle.loads(cPickle.dumps(DummyClass()))
assert_equal(instance.bulky_attr, list(range(100)))
assert instance.non_picklable is not None
def test_value_error_no_load_method(self):
assert_raises(ValueError, do_not_pickle_attributes("x"), FaultyClass)
def test_value_error_iterator(self):
assert_raises(ValueError, cPickle.dumps, UnpicklableClass())
def test_value_error_attribute_non_loaded(self):
assert_raises(ValueError, getattr, NonLoadingClass(), 'attribute')
|
// ... existing code ...
from numpy.testing import assert_raises, assert_equal
from six.moves import range, cPickle
from fuel.iterator import DataIterator
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_picklable", "bulky_attr")
class DummyClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_picklable = lambda x: x
class FaultyClass(object):
pass
@do_not_pickle_attributes("iterator")
class UnpicklableClass(object):
def __init__(self):
self.load()
def load(self):
self.iterator = DataIterator(None)
@do_not_pickle_attributes("attribute")
class NonLoadingClass(object):
def load(self):
pass
class TestDoNotPickleAttributes(object):
def test_load(self):
instance = cPickle.loads(cPickle.dumps(DummyClass()))
assert_equal(instance.bulky_attr, list(range(100)))
assert instance.non_picklable is not None
def test_value_error_no_load_method(self):
assert_raises(ValueError, do_not_pickle_attributes("x"), FaultyClass)
def test_value_error_iterator(self):
assert_raises(ValueError, cPickle.dumps, UnpicklableClass())
def test_value_error_attribute_non_loaded(self):
assert_raises(ValueError, getattr, NonLoadingClass(), 'attribute')
// ... rest of the code ...
|
600e96defa787fe4fa427d7447323847c59e85b3
|
src/main/java/yokohama/unit/ast_junit/SuchThatMatcherExpr.java
|
src/main/java/yokohama/unit/ast_junit/SuchThatMatcherExpr.java
|
package yokohama.unit.ast_junit;
import java.util.List;
import java.util.Set;
import lombok.EqualsAndHashCode;
import lombok.Value;
import yokohama.unit.util.SBuilder;
import static yokohama.unit.util.SetUtils.setOf;
@Value
@EqualsAndHashCode(callSuper=false)
public class SuchThatMatcherExpr extends MatcherExpr {
private List<Statement> statements;
@Override
public void getExpr(SBuilder sb, String varName, ExpressionStrategy expressionStrategy, MockStrategy mockStrategy) {
sb.appendln("Matcher ", varName, " = new BaseMatcher() {");
sb.shift();
sb.appendln("@Override");
sb.appendln("public boolean matches(Object obj) {");
sb.shift();
for (Statement statement : statements) {
statement.toString(sb, expressionStrategy, mockStrategy);
}
sb.unshift();
sb.appendln("}");
sb.unshift();
sb.appendln("};");
}
@Override
public Set<ImportedName> importedNames() {
return setOf(new ImportClass("org.hamcrest.BaseMatcher"));
}
}
|
package yokohama.unit.ast_junit;
import java.util.List;
import java.util.Set;
import lombok.EqualsAndHashCode;
import lombok.Value;
import yokohama.unit.util.SBuilder;
import static yokohama.unit.util.SetUtils.setOf;
@Value
@EqualsAndHashCode(callSuper=false)
public class SuchThatMatcherExpr extends MatcherExpr {
private List<Statement> statements;
@Override
public void getExpr(SBuilder sb, String varName, ExpressionStrategy expressionStrategy, MockStrategy mockStrategy) {
sb.appendln("Matcher ", varName, " = new BaseMatcher() {");
sb.shift();
sb.appendln("@Override");
sb.appendln("public boolean matches(Object obj) {");
sb.shift();
for (Statement statement : statements) {
statement.toString(sb, expressionStrategy, mockStrategy);
}
sb.unshift();
sb.appendln("}");
sb.appendln("@Override");
sb.appendln("public void describeTo(Description description) {");
sb.appendln("}");
sb.unshift();
sb.appendln("};");
}
@Override
public Set<ImportedName> importedNames() {
return setOf(
new ImportClass("org.hamcrest.BaseMatcher"),
new ImportClass("org.hamcrest.Description"));
}
}
|
Add (empty) decribeTo method to generated code
|
Add (empty) decribeTo method to generated code
|
Java
|
mit
|
tkob/yokohamaunit,tkob/yokohamaunit
|
java
|
## Code Before:
package yokohama.unit.ast_junit;
import java.util.List;
import java.util.Set;
import lombok.EqualsAndHashCode;
import lombok.Value;
import yokohama.unit.util.SBuilder;
import static yokohama.unit.util.SetUtils.setOf;
@Value
@EqualsAndHashCode(callSuper=false)
public class SuchThatMatcherExpr extends MatcherExpr {
private List<Statement> statements;
@Override
public void getExpr(SBuilder sb, String varName, ExpressionStrategy expressionStrategy, MockStrategy mockStrategy) {
sb.appendln("Matcher ", varName, " = new BaseMatcher() {");
sb.shift();
sb.appendln("@Override");
sb.appendln("public boolean matches(Object obj) {");
sb.shift();
for (Statement statement : statements) {
statement.toString(sb, expressionStrategy, mockStrategy);
}
sb.unshift();
sb.appendln("}");
sb.unshift();
sb.appendln("};");
}
@Override
public Set<ImportedName> importedNames() {
return setOf(new ImportClass("org.hamcrest.BaseMatcher"));
}
}
## Instruction:
Add (empty) decribeTo method to generated code
## Code After:
package yokohama.unit.ast_junit;
import java.util.List;
import java.util.Set;
import lombok.EqualsAndHashCode;
import lombok.Value;
import yokohama.unit.util.SBuilder;
import static yokohama.unit.util.SetUtils.setOf;
@Value
@EqualsAndHashCode(callSuper=false)
public class SuchThatMatcherExpr extends MatcherExpr {
private List<Statement> statements;
@Override
public void getExpr(SBuilder sb, String varName, ExpressionStrategy expressionStrategy, MockStrategy mockStrategy) {
sb.appendln("Matcher ", varName, " = new BaseMatcher() {");
sb.shift();
sb.appendln("@Override");
sb.appendln("public boolean matches(Object obj) {");
sb.shift();
for (Statement statement : statements) {
statement.toString(sb, expressionStrategy, mockStrategy);
}
sb.unshift();
sb.appendln("}");
sb.appendln("@Override");
sb.appendln("public void describeTo(Description description) {");
sb.appendln("}");
sb.unshift();
sb.appendln("};");
}
@Override
public Set<ImportedName> importedNames() {
return setOf(
new ImportClass("org.hamcrest.BaseMatcher"),
new ImportClass("org.hamcrest.Description"));
}
}
|
...
}
sb.unshift();
sb.appendln("}");
sb.appendln("@Override");
sb.appendln("public void describeTo(Description description) {");
sb.appendln("}");
sb.unshift();
sb.appendln("};");
}
...
@Override
public Set<ImportedName> importedNames() {
return setOf(
new ImportClass("org.hamcrest.BaseMatcher"),
new ImportClass("org.hamcrest.Description"));
}
}
...
|
ea45d42fc7afa5f0b40a7e774a58c0545492ee55
|
src/main/java/pw/ian/albkit/AlbPlugin.java
|
src/main/java/pw/ian/albkit/AlbPlugin.java
|
package pw.ian.albkit;
import org.bukkit.Server;
import org.bukkit.event.Listener;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.ServicesManager;
import org.bukkit.plugin.java.JavaPlugin;
/**
* A base plugin class which makes creating new plugins more convenient, as it
* removes the necessity to initialise server, plugin manager and services
* manager variables as well as providing an easy way to register listeners
*
* @author Ollie
*/
public abstract class AlbPlugin extends JavaPlugin {
protected Server server;
protected PluginManager pluginMgr;
protected ServicesManager servicesMgr;
public abstract void onEnable();
/**
* Initialises variables etc for this plugin. Should be called at the start
* of the onEnable() implementation in extensions of this class
*/
protected void init() {
server = getServer();
pluginMgr = server.getPluginManager();
servicesMgr = server.getServicesManager();
}
/**
* Registers the given listener to this JavaPlugin object
*
* @param listener The Listener to register
*/
protected void register(final Listener listener) {
pluginMgr.registerEvents(listener, this);
}
}
|
package pw.ian.albkit;
import pw.ian.albkit.command.CommandHandler;
import pw.ian.albkit.command.Commands;
import org.bukkit.Server;
import org.bukkit.event.Listener;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.ServicesManager;
import org.bukkit.plugin.java.JavaPlugin;
/**
* A base plugin class which makes creating new plugins more convenient, as it
* removes the necessity to initialise server, plugin manager and services
* manager variables as well as providing an easy way to register listeners
*
* @author Ollie
*/
public abstract class AlbPlugin extends JavaPlugin {
protected Server server;
protected PluginManager pluginMgr;
protected ServicesManager servicesMgr;
public abstract void onEnable();
/**
* Initialises variables etc for this plugin. Should be called at the start
* of the onEnable() implementation in extensions of this class
*/
protected void init() {
server = getServer();
pluginMgr = server.getPluginManager();
servicesMgr = server.getServicesManager();
}
/**
* Registers the given listener to this JavaPlugin object
*
* @param listener The Listener to register
*/
protected void register(final Listener listener) {
pluginMgr.registerEvents(listener, this);
}
/**
* Registers the given CommandHandler to a command with the given name
*
* @param name The name to register the command to
* @param handler The CommandHandler to register for the command
*/
protected void register(final String name, final CommandHandler handler) {
Commands.registerCommand(this, name, handler);
}
/**
* Registers the given CommandHandler
*
* @param handler The CommandHandler to register
*/
protected void register(final CommandHandler handler) {
register(handler.getName(), handler);
}
}
|
Add utility command registration methods
|
Add utility command registration methods
|
Java
|
mit
|
simplyianm/albkit
|
java
|
## Code Before:
package pw.ian.albkit;
import org.bukkit.Server;
import org.bukkit.event.Listener;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.ServicesManager;
import org.bukkit.plugin.java.JavaPlugin;
/**
* A base plugin class which makes creating new plugins more convenient, as it
* removes the necessity to initialise server, plugin manager and services
* manager variables as well as providing an easy way to register listeners
*
* @author Ollie
*/
public abstract class AlbPlugin extends JavaPlugin {
protected Server server;
protected PluginManager pluginMgr;
protected ServicesManager servicesMgr;
public abstract void onEnable();
/**
* Initialises variables etc for this plugin. Should be called at the start
* of the onEnable() implementation in extensions of this class
*/
protected void init() {
server = getServer();
pluginMgr = server.getPluginManager();
servicesMgr = server.getServicesManager();
}
/**
* Registers the given listener to this JavaPlugin object
*
* @param listener The Listener to register
*/
protected void register(final Listener listener) {
pluginMgr.registerEvents(listener, this);
}
}
## Instruction:
Add utility command registration methods
## Code After:
package pw.ian.albkit;
import pw.ian.albkit.command.CommandHandler;
import pw.ian.albkit.command.Commands;
import org.bukkit.Server;
import org.bukkit.event.Listener;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.ServicesManager;
import org.bukkit.plugin.java.JavaPlugin;
/**
* A base plugin class which makes creating new plugins more convenient, as it
* removes the necessity to initialise server, plugin manager and services
* manager variables as well as providing an easy way to register listeners
*
* @author Ollie
*/
public abstract class AlbPlugin extends JavaPlugin {
protected Server server;
protected PluginManager pluginMgr;
protected ServicesManager servicesMgr;
public abstract void onEnable();
/**
* Initialises variables etc for this plugin. Should be called at the start
* of the onEnable() implementation in extensions of this class
*/
protected void init() {
server = getServer();
pluginMgr = server.getPluginManager();
servicesMgr = server.getServicesManager();
}
/**
* Registers the given listener to this JavaPlugin object
*
* @param listener The Listener to register
*/
protected void register(final Listener listener) {
pluginMgr.registerEvents(listener, this);
}
/**
* Registers the given CommandHandler to a command with the given name
*
* @param name The name to register the command to
* @param handler The CommandHandler to register for the command
*/
protected void register(final String name, final CommandHandler handler) {
Commands.registerCommand(this, name, handler);
}
/**
* Registers the given CommandHandler
*
* @param handler The CommandHandler to register
*/
protected void register(final CommandHandler handler) {
register(handler.getName(), handler);
}
}
|
...
package pw.ian.albkit;
import pw.ian.albkit.command.CommandHandler;
import pw.ian.albkit.command.Commands;
import org.bukkit.Server;
import org.bukkit.event.Listener;
...
pluginMgr.registerEvents(listener, this);
}
/**
* Registers the given CommandHandler to a command with the given name
*
* @param name The name to register the command to
* @param handler The CommandHandler to register for the command
*/
protected void register(final String name, final CommandHandler handler) {
Commands.registerCommand(this, name, handler);
}
/**
* Registers the given CommandHandler
*
* @param handler The CommandHandler to register
*/
protected void register(final CommandHandler handler) {
register(handler.getName(), handler);
}
}
...
|
4aa11073a551c8a026daea9175336b63dd9780b2
|
src/poliastro/twobody/events.py
|
src/poliastro/twobody/events.py
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
|
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
Add altitude cross event detector
|
Add altitude cross event detector
|
Python
|
mit
|
poliastro/poliastro
|
python
|
## Code Before:
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
return H - self._R
## Instruction:
Add altitude cross event detector
## Code After:
from astropy import units as u
from numpy.linalg import norm
class LithobrakeEvent:
"""Terminal event that detects impact with the attractor surface.
Parameters
----------
R : float
Radius of the attractor.
"""
def __init__(self, R):
self._R = R
self._last_t = None
@property
def terminal(self):
# Tell SciPy to stop the integration at H = R (impact)
return True
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
|
// ... existing code ...
self._last_t = t
H = norm(u[:3])
# SciPy will search for H - R = 0
print(H - self._R)
return H - self._R
class AltitudeCrossEvent:
"""Detect if a satellite crosses a specific threshold altitude.
Parameters
----------
R: ~astropy.units.Quantity
Radius of the attractor (km).
thresh_H: ~astropy.units.Quantity
Threshold altitude (in km), defaults to 100 km.
terminal: bool
Whether to terminate integration if this event occurs, defaults to True.
"""
def __init__(self, R, thresh_H=100*u.km, terminal=True):
self._R = R.to(u.km).value
self._thresh_H = thresh_H.to(u.km).value # Threshold height from the ground.
self._terminal = terminal
self._last_t = None
@property
def terminal(self):
# Orekit's API stops propagation when descending, but not when ascending.
return self._terminal
@property
def last_t(self):
return self._last_t * u.s
def __call__(self, t, u, k):
self._last_t = t
H = norm(u[:3])
# H is from the center of the attractor.
return H - self._R - self._thresh_H # If this goes from +ve to -ve, altitude is decreasing.
// ... rest of the code ...
|
6ca27fba516ddc63ad6bae98b20e5f9a42b37451
|
examples/plotting/file/image.py
|
examples/plotting/file/image.py
|
import numpy as np
from bokeh.plotting import *
from bokeh.objects import Range1d
N = 1000
x = np.linspace(0, 10, N)
y = np.linspace(0, 10, N)
xx, yy = np.meshgrid(x, y)
d = np.sin(xx)*np.cos(yy)
output_file("image.html", title="image.py example")
image(
image=[d], x=[0], y=[0], dw=[10], dh=[10], palette=["Spectral-11"],
x_range=[0, 10], y_range=[0, 10],
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
curplot().x_range = [5, 10]
show() # open a browser
|
import numpy as np
from bokeh.plotting import *
N = 1000
x = np.linspace(0, 10, N)
y = np.linspace(0, 10, N)
xx, yy = np.meshgrid(x, y)
d = np.sin(xx)*np.cos(yy)
output_file("image.html", title="image.py example")
image(
image=[d], x=[0], y=[0], dw=[10], dh=[10], palette=["Spectral-11"],
x_range=[0, 10], y_range=[0, 10],
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
show() # open a browser
|
Fix example and remove extraneous import.
|
Fix example and remove extraneous import.
|
Python
|
bsd-3-clause
|
birdsarah/bokeh,srinathv/bokeh,justacec/bokeh,eteq/bokeh,saifrahmed/bokeh,eteq/bokeh,rothnic/bokeh,dennisobrien/bokeh,deeplook/bokeh,draperjames/bokeh,tacaswell/bokeh,daodaoliang/bokeh,abele/bokeh,abele/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,ericdill/bokeh,timsnyder/bokeh,CrazyGuo/bokeh,ptitjano/bokeh,quasiben/bokeh,rothnic/bokeh,ericmjl/bokeh,rs2/bokeh,philippjfr/bokeh,ericmjl/bokeh,rhiever/bokeh,stonebig/bokeh,timothydmorton/bokeh,rs2/bokeh,azjps/bokeh,roxyboy/bokeh,aiguofer/bokeh,justacec/bokeh,draperjames/bokeh,stuart-knock/bokeh,paultcochrane/bokeh,aiguofer/bokeh,birdsarah/bokeh,awanke/bokeh,ChristosChristofidis/bokeh,roxyboy/bokeh,laurent-george/bokeh,ahmadia/bokeh,saifrahmed/bokeh,birdsarah/bokeh,mutirri/bokeh,jplourenco/bokeh,htygithub/bokeh,laurent-george/bokeh,canavandl/bokeh,daodaoliang/bokeh,bokeh/bokeh,ahmadia/bokeh,schoolie/bokeh,schoolie/bokeh,evidation-health/bokeh,maxalbert/bokeh,dennisobrien/bokeh,ChristosChristofidis/bokeh,saifrahmed/bokeh,roxyboy/bokeh,muku42/bokeh,phobson/bokeh,jakirkham/bokeh,josherick/bokeh,ericmjl/bokeh,eteq/bokeh,ptitjano/bokeh,mutirri/bokeh,muku42/bokeh,timsnyder/bokeh,lukebarnard1/bokeh,DuCorey/bokeh,percyfal/bokeh,phobson/bokeh,laurent-george/bokeh,bokeh/bokeh,draperjames/bokeh,canavandl/bokeh,carlvlewis/bokeh,xguse/bokeh,carlvlewis/bokeh,mutirri/bokeh,DuCorey/bokeh,lukebarnard1/bokeh,muku42/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,tacaswell/bokeh,xguse/bokeh,akloster/bokeh,PythonCharmers/bokeh,aavanian/bokeh,roxyboy/bokeh,evidation-health/bokeh,alan-unravel/bokeh,mindriot101/bokeh,aavanian/bokeh,msarahan/bokeh,aiguofer/bokeh,bsipocz/bokeh,caseyclements/bokeh,percyfal/bokeh,tacaswell/bokeh,PythonCharmers/bokeh,ChristosChristofidis/bokeh,ChinaQuants/bokeh,timothydmorton/bokeh,mutirri/bokeh,ericdill/bokeh,timsnyder/bokeh,KasperPRasmussen/bokeh,matbra/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,satishgoda/bokeh,josherick/bokeh,srinathv/bokeh,ahmadia/bokeh,caseyclements/bokeh,draperjames/bokeh,
stonebig/bokeh,clairetang6/bokeh,almarklein/bokeh,carlvlewis/bokeh,almarklein/bokeh,laurent-george/bokeh,satishgoda/bokeh,ericdill/bokeh,srinathv/bokeh,philippjfr/bokeh,deeplook/bokeh,msarahan/bokeh,timsnyder/bokeh,justacec/bokeh,rothnic/bokeh,aiguofer/bokeh,azjps/bokeh,DuCorey/bokeh,azjps/bokeh,PythonCharmers/bokeh,percyfal/bokeh,awanke/bokeh,bsipocz/bokeh,maxalbert/bokeh,almarklein/bokeh,dennisobrien/bokeh,bokeh/bokeh,DuCorey/bokeh,CrazyGuo/bokeh,matbra/bokeh,rs2/bokeh,josherick/bokeh,ericdill/bokeh,dennisobrien/bokeh,ChinaQuants/bokeh,clairetang6/bokeh,muku42/bokeh,stuart-knock/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,khkaminska/bokeh,paultcochrane/bokeh,khkaminska/bokeh,xguse/bokeh,ChristosChristofidis/bokeh,jplourenco/bokeh,KasperPRasmussen/bokeh,ChinaQuants/bokeh,azjps/bokeh,abele/bokeh,msarahan/bokeh,maxalbert/bokeh,rs2/bokeh,alan-unravel/bokeh,awanke/bokeh,paultcochrane/bokeh,philippjfr/bokeh,timothydmorton/bokeh,htygithub/bokeh,quasiben/bokeh,rhiever/bokeh,evidation-health/bokeh,rhiever/bokeh,ericmjl/bokeh,msarahan/bokeh,timsnyder/bokeh,deeplook/bokeh,saifrahmed/bokeh,philippjfr/bokeh,xguse/bokeh,rhiever/bokeh,KasperPRasmussen/bokeh,tacaswell/bokeh,jplourenco/bokeh,phobson/bokeh,clairetang6/bokeh,ptitjano/bokeh,aavanian/bokeh,matbra/bokeh,Karel-van-de-Plassche/bokeh,bsipocz/bokeh,KasperPRasmussen/bokeh,Karel-van-de-Plassche/bokeh,ericmjl/bokeh,draperjames/bokeh,DuCorey/bokeh,justacec/bokeh,ChinaQuants/bokeh,eteq/bokeh,jakirkham/bokeh,PythonCharmers/bokeh,schoolie/bokeh,akloster/bokeh,htygithub/bokeh,lukebarnard1/bokeh,bsipocz/bokeh,schoolie/bokeh,azjps/bokeh,caseyclements/bokeh,akloster/bokeh,stuart-knock/bokeh,stuart-knock/bokeh,mindriot101/bokeh,CrazyGuo/bokeh,abele/bokeh,khkaminska/bokeh,maxalbert/bokeh,rs2/bokeh,CrazyGuo/bokeh,jakirkham/bokeh,bokeh/bokeh,khkaminska/bokeh,quasiben/bokeh,satishgoda/bokeh,clairetang6/bokeh,canavandl/bokeh,daodaoliang/bokeh,ahmadia/bokeh,gpfreitas/bokeh,josherick/bokeh,philippjfr/bokeh,phobson/bokeh,dennisobrien/bokeh,
matbra/bokeh,satishgoda/bokeh,ptitjano/bokeh,akloster/bokeh,percyfal/bokeh,caseyclements/bokeh,htygithub/bokeh,canavandl/bokeh,carlvlewis/bokeh,paultcochrane/bokeh,mindriot101/bokeh,schoolie/bokeh,lukebarnard1/bokeh,gpfreitas/bokeh,gpfreitas/bokeh,alan-unravel/bokeh,evidation-health/bokeh,timothydmorton/bokeh,aavanian/bokeh,gpfreitas/bokeh,jakirkham/bokeh,awanke/bokeh,rothnic/bokeh,aiguofer/bokeh,stonebig/bokeh,jplourenco/bokeh,stonebig/bokeh,deeplook/bokeh,jakirkham/bokeh,alan-unravel/bokeh,mindriot101/bokeh,birdsarah/bokeh,srinathv/bokeh,daodaoliang/bokeh
|
python
|
## Code Before:
import numpy as np
from bokeh.plotting import *
from bokeh.objects import Range1d
N = 1000
x = np.linspace(0, 10, N)
y = np.linspace(0, 10, N)
xx, yy = np.meshgrid(x, y)
d = np.sin(xx)*np.cos(yy)
output_file("image.html", title="image.py example")
image(
image=[d], x=[0], y=[0], dw=[10], dh=[10], palette=["Spectral-11"],
x_range=[0, 10], y_range=[0, 10],
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
curplot().x_range = [5, 10]
show() # open a browser
## Instruction:
Fix example and remove extraneous import.
## Code After:
import numpy as np
from bokeh.plotting import *
N = 1000
x = np.linspace(0, 10, N)
y = np.linspace(0, 10, N)
xx, yy = np.meshgrid(x, y)
d = np.sin(xx)*np.cos(yy)
output_file("image.html", title="image.py example")
image(
image=[d], x=[0], y=[0], dw=[10], dh=[10], palette=["Spectral-11"],
x_range=[0, 10], y_range=[0, 10],
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
show() # open a browser
|
...
import numpy as np
from bokeh.plotting import *
N = 1000
...
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
show() # open a browser
...
|
d95eda2f88a8b493e40cd6628c7e532a1f510610
|
src/dashboard/src/main/urls.py
|
src/dashboard/src/main/urls.py
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
# Preservatin planning
(r'preservation-planning/$', 'preservation_planning'),
# Index
(r'', redirect_to, {'url': '/ingest/'}),
)
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Index
(r'^$', redirect_to, {'url': '/ingest/'}),
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
# Preservatin planning
(r'preservation-planning/$', 'preservation_planning'),
)
|
Remove default route because it is not the desired behavior.
|
Remove default route because it is not the desired behavior.
Autoconverted from SVN (revision:1409)
|
Python
|
agpl-3.0
|
artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history
|
python
|
## Code Before:
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
# Preservatin planning
(r'preservation-planning/$', 'preservation_planning'),
# Index
(r'', redirect_to, {'url': '/ingest/'}),
)
## Instruction:
Remove default route because it is not the desired behavior.
Autoconverted from SVN (revision:1409)
## Code After:
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.generic.simple import direct_to_template, redirect_to
UUID_REGEX = '[\w]{8}(-[\w]{4}){3}-[\w]{12}'
urlpatterns = patterns('dashboard.main.views',
# Index
(r'^$', redirect_to, {'url': '/ingest/'}),
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
# Preservatin planning
(r'preservation-planning/$', 'preservation_planning'),
)
|
// ... existing code ...
urlpatterns = patterns('dashboard.main.views',
# Index
(r'^$', redirect_to, {'url': '/ingest/'}),
# Ingest
url(r'ingest/$', direct_to_template, {'template': 'main/ingest.html', 'extra_context': {'polling_interval': settings.POLLING_INTERVAL, 'microservices_help': settings.MICROSERVICES_HELP}}, 'ingest'),
(r'ingest/go/$', 'ingest'),
(r'ingest/go/(?P<uuid>' + UUID_REGEX + ')$', 'ingest'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/explore/$', 'explore'),
(r'jobs/(?P<uuid>' + UUID_REGEX + ')/list-objects/$', 'list_objects'),
(r'tasks/(?P<uuid>' + UUID_REGEX + ')/$', 'tasks'),
# Preservatin planning
(r'preservation-planning/$', 'preservation_planning'),
)
// ... rest of the code ...
|
9c50c5e028dd4d8a7a51e142d35fcc45b1b60f58
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='pyspotify_helper',
version='0.0.1',
author='Matt Wismer',
author_email='[email protected]',
description='Simplest integration of Spotify into Python',
license='MIT',
packages=['pyspotify_helper'],
url='https://github.com/MattWis/pyspotify_helper.git',
install_requires=['pyspotify'])
|
from setuptools import setup
setup(name='pyspotify_helper',
version='0.0.1',
author='Matt Wismer',
author_email='[email protected]',
description='Simplest integration of Spotify into Python',
license='MIT',
packages=['pyspotify_helper'],
package_dir={'pyspotify_helper': 'pyspotify_helper'},
url='https://github.com/MattWis/pyspotify_helper.git',
install_requires=['pyspotify'])
|
Use a queue to manage playing tracks
|
Use a queue to manage playing tracks
|
Python
|
mit
|
MattWis/pyspotify_helper
|
python
|
## Code Before:
from setuptools import setup
setup(name='pyspotify_helper',
version='0.0.1',
author='Matt Wismer',
author_email='[email protected]',
description='Simplest integration of Spotify into Python',
license='MIT',
packages=['pyspotify_helper'],
url='https://github.com/MattWis/pyspotify_helper.git',
install_requires=['pyspotify'])
## Instruction:
Use a queue to manage playing tracks
## Code After:
from setuptools import setup
setup(name='pyspotify_helper',
version='0.0.1',
author='Matt Wismer',
author_email='[email protected]',
description='Simplest integration of Spotify into Python',
license='MIT',
packages=['pyspotify_helper'],
package_dir={'pyspotify_helper': 'pyspotify_helper'},
url='https://github.com/MattWis/pyspotify_helper.git',
install_requires=['pyspotify'])
|
// ... existing code ...
description='Simplest integration of Spotify into Python',
license='MIT',
packages=['pyspotify_helper'],
package_dir={'pyspotify_helper': 'pyspotify_helper'},
url='https://github.com/MattWis/pyspotify_helper.git',
install_requires=['pyspotify'])
// ... rest of the code ...
|
66c0b220188499a5871ee1fbe5b79f0a57db4ec9
|
feder/tasks/filters.py
|
feder/tasks/filters.py
|
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
|
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
|
Add is_done filter for task
|
Add is_done filter for task
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
python
|
## Code Before:
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
## Instruction:
Add is_done filter for task
## Code After:
from atom.filters import CrispyFilterMixin, AutocompleteChoiceFilter
from django.utils.translation import ugettext_lazy as _
import django_filters
from .models import Task
class TaskFilter(CrispyFilterMixin, django_filters.FilterSet):
case = AutocompleteChoiceFilter('CaseAutocomplete')
questionary = AutocompleteChoiceFilter('QuestionaryAutocomplete')
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
super(TaskFilter, self).__init__(*args, **kwargs)
self.filters['name'].lookup_type = 'icontains'
class Meta:
model = Task
fields = ['name', 'case', 'questionary', 'case__institution', ]
order_by = ['created', ]
|
# ... existing code ...
case__institution = AutocompleteChoiceFilter('InstitutionAutocomplete')
case__monitoring = AutocompleteChoiceFilter('MonitoringAutocomplete')
created = django_filters.DateRangeFilter(label=_("Creation date"))
done = django_filters.BooleanFilter(label=_("Is done?"),
action=lambda qs, v: qs.is_done(exclude=not v))
form_class = None
def __init__(self, *args, **kwargs):
# ... rest of the code ...
|
ed032d6e1a01ce8ad2af773e71113a5c3a647a83
|
app/src/main/java/fr/coding/tools/AutoAuthSslWebView.java
|
app/src/main/java/fr/coding/tools/AutoAuthSslWebView.java
|
package fr.coding.tools;
import android.webkit.HttpAuthHandler;
import android.webkit.WebView;
import java.util.List;
import fr.coding.tools.model.HostAuth;
import fr.coding.tools.model.SslByPass;
/**
* Created by Matthieu on 03/10/2015.
*/
public class AutoAuthSslWebView extends SslWebView {
public CallbackResult<HostAuth, HostAuth> AuthAsked;
protected List<HostAuth> allowedHosts;
public void setAllowedHosts(List<HostAuth> hosts) {
allowedHosts = hosts;
}
public List<HostAuth> getAllowedHosts() {
return allowedHosts;
}
@Override
public void onReceivedHttpAuthRequest(WebView webView, HttpAuthHandler handler, String host, String realm) {
if (allowedHosts != null) {
for (HostAuth ha :
allowedHosts) {
if (host.equals(ha.Host)) {
handler.proceed(ha.Login, ha.Password);
return;
}
}
}
if (AuthAsked != null) {
HostAuth hostAuth = new HostAuth();
hostAuth.Host = host;
HostAuth ret = AuthAsked.onCallback(hostAuth);
if (ret != null) {
handler.proceed(ret.Login, ret.Password);
return;
}
}
super.onReceivedHttpAuthRequest(webView, handler, host, realm);
}
}
|
package fr.coding.tools;
import android.webkit.HttpAuthHandler;
import android.webkit.WebView;
import java.util.Date;
import java.util.List;
import fr.coding.tools.model.HostAuth;
import fr.coding.tools.model.SslByPass;
/**
* Created by Matthieu on 03/10/2015.
*/
public class AutoAuthSslWebView extends SslWebView {
public CallbackResult<HostAuth, HostAuth> AuthAsked;
protected List<HostAuth> allowedHosts;
public void setAllowedHosts(List<HostAuth> hosts) {
allowedHosts = hosts;
}
public List<HostAuth> getAllowedHosts() {
return allowedHosts;
}
public long AutoTestedDate = 0;
@Override
public void onReceivedHttpAuthRequest(WebView webView, HttpAuthHandler handler, String host, String realm) {
if ((allowedHosts != null)&&(AutoTestedDate < (new Date().getTime()-(15000)))) {
for (HostAuth ha :
allowedHosts) {
if (host.equals(ha.Host)) {
AutoTestedDate = new Date().getTime();
handler.proceed(ha.Login, ha.Password);
return;
}
}
}
if (AuthAsked != null) {
HostAuth hostAuth = new HostAuth();
hostAuth.Host = host;
HostAuth ret = AuthAsked.onCallback(hostAuth);
if (ret != null) {
handler.proceed(ret.Login, ret.Password);
return;
}
}
super.onReceivedHttpAuthRequest(webView, handler, host, realm);
}
}
|
Correct cycling bug when password is updated server side
|
Correct cycling bug when password is updated server side
|
Java
|
apache-2.0
|
mtudury/yourandroidwebapp,mtudury/yourandroidwebapp
|
java
|
## Code Before:
package fr.coding.tools;
import android.webkit.HttpAuthHandler;
import android.webkit.WebView;
import java.util.List;
import fr.coding.tools.model.HostAuth;
import fr.coding.tools.model.SslByPass;
/**
* Created by Matthieu on 03/10/2015.
*/
public class AutoAuthSslWebView extends SslWebView {
public CallbackResult<HostAuth, HostAuth> AuthAsked;
protected List<HostAuth> allowedHosts;
public void setAllowedHosts(List<HostAuth> hosts) {
allowedHosts = hosts;
}
public List<HostAuth> getAllowedHosts() {
return allowedHosts;
}
@Override
public void onReceivedHttpAuthRequest(WebView webView, HttpAuthHandler handler, String host, String realm) {
if (allowedHosts != null) {
for (HostAuth ha :
allowedHosts) {
if (host.equals(ha.Host)) {
handler.proceed(ha.Login, ha.Password);
return;
}
}
}
if (AuthAsked != null) {
HostAuth hostAuth = new HostAuth();
hostAuth.Host = host;
HostAuth ret = AuthAsked.onCallback(hostAuth);
if (ret != null) {
handler.proceed(ret.Login, ret.Password);
return;
}
}
super.onReceivedHttpAuthRequest(webView, handler, host, realm);
}
}
## Instruction:
Correct cycling bug when password is updated server side
## Code After:
package fr.coding.tools;
import android.webkit.HttpAuthHandler;
import android.webkit.WebView;
import java.util.Date;
import java.util.List;
import fr.coding.tools.model.HostAuth;
import fr.coding.tools.model.SslByPass;
/**
* Created by Matthieu on 03/10/2015.
*/
public class AutoAuthSslWebView extends SslWebView {
public CallbackResult<HostAuth, HostAuth> AuthAsked;
protected List<HostAuth> allowedHosts;
public void setAllowedHosts(List<HostAuth> hosts) {
allowedHosts = hosts;
}
public List<HostAuth> getAllowedHosts() {
return allowedHosts;
}
public long AutoTestedDate = 0;
@Override
public void onReceivedHttpAuthRequest(WebView webView, HttpAuthHandler handler, String host, String realm) {
if ((allowedHosts != null)&&(AutoTestedDate < (new Date().getTime()-(15000)))) {
for (HostAuth ha :
allowedHosts) {
if (host.equals(ha.Host)) {
AutoTestedDate = new Date().getTime();
handler.proceed(ha.Login, ha.Password);
return;
}
}
}
if (AuthAsked != null) {
HostAuth hostAuth = new HostAuth();
hostAuth.Host = host;
HostAuth ret = AuthAsked.onCallback(hostAuth);
if (ret != null) {
handler.proceed(ret.Login, ret.Password);
return;
}
}
super.onReceivedHttpAuthRequest(webView, handler, host, realm);
}
}
|
// ... existing code ...
import android.webkit.HttpAuthHandler;
import android.webkit.WebView;
import java.util.Date;
import java.util.List;
import fr.coding.tools.model.HostAuth;
import fr.coding.tools.model.SslByPass;
// ... modified code ...
return allowedHosts;
}
public long AutoTestedDate = 0;
@Override
public void onReceivedHttpAuthRequest(WebView webView, HttpAuthHandler handler, String host, String realm) {
if ((allowedHosts != null)&&(AutoTestedDate < (new Date().getTime()-(15000)))) {
for (HostAuth ha :
allowedHosts) {
if (host.equals(ha.Host)) {
AutoTestedDate = new Date().getTime();
handler.proceed(ha.Login, ha.Password);
return;
}
// ... rest of the code ...
|
918002b7cc42d465dc80d2313e31d8fbfeef3712
|
include/ethdrivers/intel.h
|
include/ethdrivers/intel.h
|
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(NICTA_GPL)
*/
#include <platsupport/io.h>
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82580_init(ps_io_ops_t io_ops, void *bar0);
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82574_init(ps_io_ops_t io_ops, void *bar0);
|
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(NICTA_GPL)
*/
#ifndef ETHIF_INTEL_H
#define ETHIF_INTEL_H
#include <platsupport/io.h>
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82580_init(ps_io_ops_t io_ops, void *bar0);
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82574_init(ps_io_ops_t io_ops, void *bar0);
#endif
|
Add missing header file guards
|
Add missing header file guards
|
C
|
bsd-2-clause
|
agacek/util_libs,agacek/util_libs,agacek/util_libs,agacek/util_libs
|
c
|
## Code Before:
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(NICTA_GPL)
*/
#include <platsupport/io.h>
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82580_init(ps_io_ops_t io_ops, void *bar0);
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82574_init(ps_io_ops_t io_ops, void *bar0);
## Instruction:
Add missing header file guards
## Code After:
/*
* Copyright 2014, NICTA
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(NICTA_GPL)
*/
#ifndef ETHIF_INTEL_H
#define ETHIF_INTEL_H
#include <platsupport/io.h>
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82580_init(ps_io_ops_t io_ops, void *bar0);
/**
* This function initialises the hardware
* @param[in] io_ops A structure containing os specific data and
* functions.
* @param[in] bar0 Where pci bar0 has been mapped into our vspace
* @return A reference to the ethernet drivers state.
*/
struct eth_driver*
ethif_e82574_init(ps_io_ops_t io_ops, void *bar0);
#endif
|
// ... existing code ...
*
* @TAG(NICTA_GPL)
*/
#ifndef ETHIF_INTEL_H
#define ETHIF_INTEL_H
#include <platsupport/io.h>
// ... modified code ...
*/
struct eth_driver*
ethif_e82574_init(ps_io_ops_t io_ops, void *bar0);
#endif
// ... rest of the code ...
|
3beffa750d68c2104b740193f0386be464829a1a
|
libpb/__init__.py
|
libpb/__init__.py
|
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
exit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
raise SystemExit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
Use SystemExit, not exit() to initiate a shutdown.
|
Use SystemExit, not exit() to initiate a shutdown.
exit() has unintented side affects, such as closing stdin, that are
undesired as stdin is assumed to be writable while libpb/event/run
unwinds (i.e. Top monitor).
|
Python
|
bsd-2-clause
|
DragonSA/portbuilder,DragonSA/portbuilder
|
python
|
## Code Before:
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
exit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
## Instruction:
Use SystemExit, not exit() to initiate a shutdown.
exit() has unintented side affects, such as closing stdin, that are
undesired as stdin is assumed to be writable while libpb/event/run
unwinds (i.e. Top monitor).
## Code After:
"""FreeBSD port building infrastructure."""
from __future__ import absolute_import
from . import event
def stop(kill=False, kill_clean=False):
"""Stop building ports and cleanup."""
from os import killpg
from signal import SIGTERM, SIGKILL
from .builder import builders
from .env import cpus, flags
from .queue import attr_queue, clean_queue, queues
from .subprocess import children
if flags["no_op"]:
raise SystemExit(254)
flags["mode"] = "clean"
if kill_clean:
cleaning = ()
else:
cleaning = set(i.pid for i in clean_queue.active)
# Kill all active children
for pid in children():
if pid not in cleaning:
try:
killpg(pid, SIGKILL if kill else SIGTERM)
except OSError:
pass
# Stop all queues
attr_queue.load = 0
for queue in queues:
queue.load = 0
# Make cleaning go a bit faster
if kill_clean:
clean_queue.load = 0
return
else:
clean_queue.load = cpus
# Wait for all active ports to finish so that they may be cleaned
active = set()
for queue in queues:
for job in queue.active:
port = job.port
active.add(port)
port.stage_completed.connect(lambda x: x.clean())
# Clean all other outstanding ports
for builder in builders:
for port in builder.ports:
if port not in active:
port.clean()
|
// ... existing code ...
from .subprocess import children
if flags["no_op"]:
raise SystemExit(254)
flags["mode"] = "clean"
// ... rest of the code ...
|
7d730f982db77545ce64513b843902576ea4746e
|
bletia/src/main/java/info/izumin/android/bletia/BleMessageThread.java
|
bletia/src/main/java/info/izumin/android/bletia/BleMessageThread.java
|
package info.izumin.android.bletia;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import org.jdeferred.Promise;
import info.izumin.android.bletia.action.Action;
import info.izumin.android.bletia.wrapper.BluetoothGattWrapper;
/**
* Created by izumin on 9/14/15.
*/
public class BleMessageThread extends Handler {
private static final int DELAY_MILLIS = 10;
private final HandlerThread mHandlerThread;
private final BluetoothGattWrapper mGattWrapper;
private final BleActionStore mActionStore;
public BleMessageThread(HandlerThread handlerThread, BluetoothGattWrapper gattWrapper, BleActionStore actionStore) {
super(handlerThread.getLooper());
mHandlerThread = handlerThread;
mGattWrapper = gattWrapper;
mActionStore = actionStore;
}
public void stop() {
mHandlerThread.quitSafely();
}
public <T> Promise<T, BletiaException, Object> execute(Action<T> action) {
mActionStore.enqueue(action);
dispatchMessage(action.obtainMessage());
return action.getDeferred().promise();
}
@Override
public void handleMessage(Message msg) {
Action.Type type = Action.Type.valueOf(msg.what);
if (mActionStore.isRunning(type)) {
sendMessageDelayed(msg, DELAY_MILLIS);
} else {
mActionStore.execute(type, mGattWrapper);
}
}
}
|
package info.izumin.android.bletia;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import org.jdeferred.Promise;
import info.izumin.android.bletia.action.Action;
import info.izumin.android.bletia.wrapper.BluetoothGattWrapper;
/**
* Created by izumin on 9/14/15.
*/
public class BleMessageThread extends Handler {
private static final int DELAY_MILLIS = 10;
private final HandlerThread mHandlerThread;
private final BluetoothGattWrapper mGattWrapper;
private final BleActionStore mActionStore;
public BleMessageThread(HandlerThread handlerThread, BluetoothGattWrapper gattWrapper, BleActionStore actionStore) {
super(handlerThread.getLooper());
mHandlerThread = handlerThread;
mGattWrapper = gattWrapper;
mActionStore = actionStore;
}
public void stop() {
mHandlerThread.quitSafely();
}
public <T> Promise<T, BletiaException, Object> execute(Action<T> action) {
mActionStore.enqueue(action);
dispatchMessage(action.obtainMessage());
return action.getDeferred().promise();
}
@Override
public void handleMessage(Message msg) {
Action.Type type = Action.Type.valueOf(msg.what);
if (mActionStore.isRunning(type)) {
Message delayed = obtainMessage();
delayed.copyFrom(msg);
sendMessageDelayed(delayed, DELAY_MILLIS);
} else {
mActionStore.execute(type, mGattWrapper);
}
}
}
|
Fix send message delayed when the message has conflict
|
Fix send message delayed when the message has conflict
|
Java
|
apache-2.0
|
izumin5210/Bletia
|
java
|
## Code Before:
package info.izumin.android.bletia;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import org.jdeferred.Promise;
import info.izumin.android.bletia.action.Action;
import info.izumin.android.bletia.wrapper.BluetoothGattWrapper;
/**
* Created by izumin on 9/14/15.
*/
public class BleMessageThread extends Handler {
private static final int DELAY_MILLIS = 10;
private final HandlerThread mHandlerThread;
private final BluetoothGattWrapper mGattWrapper;
private final BleActionStore mActionStore;
public BleMessageThread(HandlerThread handlerThread, BluetoothGattWrapper gattWrapper, BleActionStore actionStore) {
super(handlerThread.getLooper());
mHandlerThread = handlerThread;
mGattWrapper = gattWrapper;
mActionStore = actionStore;
}
public void stop() {
mHandlerThread.quitSafely();
}
public <T> Promise<T, BletiaException, Object> execute(Action<T> action) {
mActionStore.enqueue(action);
dispatchMessage(action.obtainMessage());
return action.getDeferred().promise();
}
@Override
public void handleMessage(Message msg) {
Action.Type type = Action.Type.valueOf(msg.what);
if (mActionStore.isRunning(type)) {
sendMessageDelayed(msg, DELAY_MILLIS);
} else {
mActionStore.execute(type, mGattWrapper);
}
}
}
## Instruction:
Fix send message delayed when the message has conflict
## Code After:
package info.izumin.android.bletia;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import org.jdeferred.Promise;
import info.izumin.android.bletia.action.Action;
import info.izumin.android.bletia.wrapper.BluetoothGattWrapper;
/**
* Created by izumin on 9/14/15.
*/
public class BleMessageThread extends Handler {
private static final int DELAY_MILLIS = 10;
private final HandlerThread mHandlerThread;
private final BluetoothGattWrapper mGattWrapper;
private final BleActionStore mActionStore;
public BleMessageThread(HandlerThread handlerThread, BluetoothGattWrapper gattWrapper, BleActionStore actionStore) {
super(handlerThread.getLooper());
mHandlerThread = handlerThread;
mGattWrapper = gattWrapper;
mActionStore = actionStore;
}
public void stop() {
mHandlerThread.quitSafely();
}
public <T> Promise<T, BletiaException, Object> execute(Action<T> action) {
mActionStore.enqueue(action);
dispatchMessage(action.obtainMessage());
return action.getDeferred().promise();
}
@Override
public void handleMessage(Message msg) {
Action.Type type = Action.Type.valueOf(msg.what);
if (mActionStore.isRunning(type)) {
Message delayed = obtainMessage();
delayed.copyFrom(msg);
sendMessageDelayed(delayed, DELAY_MILLIS);
} else {
mActionStore.execute(type, mGattWrapper);
}
}
}
|
...
Action.Type type = Action.Type.valueOf(msg.what);
if (mActionStore.isRunning(type)) {
Message delayed = obtainMessage();
delayed.copyFrom(msg);
sendMessageDelayed(delayed, DELAY_MILLIS);
} else {
mActionStore.execute(type, mGattWrapper);
}
...
|
7d02bd555d7519d485d00e02136d26a6e4e7096e
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
nova/db/sqlalchemy/migrate_repo/versions/034_change_instance_id_in_migrations.py
|
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
Drop FK before dropping instance_id column.
|
Drop FK before dropping instance_id column.
|
Python
|
apache-2.0
|
sacharya/nova,jianghuaw/nova,leilihh/novaha,eneabio/nova,vladikr/nova_drafts,KarimAllah/nova,sileht/deb-openstack-nova,Stavitsky/nova,DirectXMan12/nova-hacking,akash1808/nova_test_latest,raildo/nova,gspilio/nova,tangfeixiong/nova,jianghuaw/nova,Juniper/nova,JioCloud/nova,zhimin711/nova,usc-isi/nova,orbitfp7/nova,JianyuWang/nova,vmturbo/nova,sebrandon1/nova,jeffrey4l/nova,Francis-Liu/animated-broccoli,psiwczak/openstack,MountainWei/nova,tianweizhang/nova,yrobla/nova,maelnor/nova,whitepages/nova,maoy/zknova,joker946/nova,russellb/nova,iuliat/nova,qwefi/nova,rahulunair/nova,berrange/nova,sileht/deb-openstack-nova,mahak/nova,fnordahl/nova,sridevikoushik31/openstack,Metaswitch/calico-nova,gooddata/openstack-nova,sebrandon1/nova,redhat-openstack/nova,eayunstack/nova,mandeepdhami/nova,tealover/nova,eharney/nova,yrobla/nova,CEG-FYP-OpenStack/scheduler,TieWei/nova,maelnor/nova,TwinkleChawla/nova,KarimAllah/nova,cloudbau/nova,isyippee/nova,mikalstill/nova,hanlind/nova,mgagne/nova,badock/nova,qwefi/nova,paulmathews/nova,kimjaejoong/nova,spring-week-topos/nova-week,plumgrid/plumgrid-nova,alaski/nova,petrutlucian94/nova,thomasem/nova,barnsnake351/nova,cernops/nova,akash1808/nova,Triv90/Nova,yrobla/nova,watonyweng/nova,akash1808/nova_test_latest,NoBodyCam/TftpPxeBootBareMetal,Tehsmash/nova,Juniper/nova,iuliat/nova,orbitfp7/nova,alexandrucoman/vbox-nova-driver,aristanetworks/arista-ovs-nova,fnordahl/nova,cernops/nova,zaina/nova,projectcalico/calico-nova,russellb/nova,apporc/nova,j-carpentier/nova,shahar-stratoscale/nova,DirectXMan12/nova-hacking,tealover/nova,vmturbo/nova,rahulunair/nova,JianyuWang/nova,varunarya10/nova_test_latest,imsplitbit/nova,klmitch/nova,silenceli/nova,NewpTone/stacklab-nova,apporc/nova,devendermishrajio/nova_test_latest,dawnpower/nova,alvarolopez/nova,felixma/nova,saleemjaveds/https-github.com-openstack-nova,adelina-t/nova,angdraug/nova,mikalstill/nova,akash1808/nova,Yuriy-Leonov/nova,CiscoSystems/nova,klmitch/nova,watonyweng/nova,devoid/nova,bgxavier/nova,
citrix-openstack-build/nova,psiwczak/openstack,nikesh-mahalka/nova,sridevikoushik31/nova,CiscoSystems/nova,joker946/nova,JioCloud/nova,salv-orlando/MyRepo,rrader/nova-docker-plugin,kimjaejoong/nova,rickerc/nova_audit,savi-dev/nova,sridevikoushik31/nova,hanlind/nova,DirectXMan12/nova-hacking,blueboxgroup/nova,JioCloud/nova_test_latest,eonpatapon/nova,luogangyi/bcec-nova,belmiromoreira/nova,fajoy/nova,rickerc/nova_audit,double12gzh/nova,sileht/deb-openstack-nova,cloudbase/nova,eayunstack/nova,NeCTAR-RC/nova,aristanetworks/arista-ovs-nova,CCI-MOC/nova,sridevikoushik31/openstack,silenceli/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,virtualopensystems/nova,Juniper/nova,devendermishrajio/nova,tudorvio/nova,edulramirez/nova,bgxavier/nova,cyx1231st/nova,shootstar/novatest,varunarya10/nova_test_latest,maheshp/novatest,cernops/nova,imsplitbit/nova,maheshp/novatest,russellb/nova,josephsuh/extra-specs,mahak/nova,mgagne/nova,plumgrid/plumgrid-nova,gspilio/nova,sridevikoushik31/nova,luogangyi/bcec-nova,NoBodyCam/TftpPxeBootBareMetal,alaski/nova,cloudbau/nova,CloudServer/nova,bigswitch/nova,houshengbo/nova_vmware_compute_driver,dawnpower/nova,rajalokan/nova,belmiromoreira/nova,virtualopensystems/nova,saleemjaveds/https-github.com-openstack-nova,bclau/nova,eonpatapon/nova,Juniper/nova,citrix-openstack-build/nova,j-carpentier/nova,sacharya/nova,zhimin711/nova,Yusuke1987/openstack_template,angdraug/nova,mmnelemane/nova,eneabio/nova,cloudbase/nova,klmitch/nova,vmturbo/nova,openstack/nova,zaina/nova,edulramirez/nova,eharney/nova,josephsuh/extra-specs,cloudbase/nova,shail2810/nova,jianghuaw/nova,Triv90/Nova,NeCTAR-RC/nova,viggates/nova,zzicewind/nova,LoHChina/nova,vmturbo/nova,spring-week-topos/nova-week,noironetworks/nova,rajalokan/nova,openstack/nova,berrange/nova,takeshineshiro/nova,eneabio/nova,cloudbase/nova-virtualbox,felixma/nova,fajoy/nova,whitepages/nova,usc-isi/extra-specs,psiwczak/openstack,ruslanloman/nova,isyippee/nova,ruslanloman/nova,petrutlucian94/nova_dev,dstroppa/opens
tack-smartos-nova-grizzly,shahar-stratoscale/nova,bclau/nova,josephsuh/extra-specs,SUSE-Cloud/nova,vladikr/nova_drafts,noironetworks/nova,fajoy/nova,ntt-sic/nova,maoy/zknova,Francis-Liu/animated-broccoli,BeyondTheClouds/nova,blueboxgroup/nova,LoHChina/nova,cloudbase/nova-virtualbox,Triv90/Nova,jianghuaw/nova,SUSE-Cloud/nova,leilihh/novaha,devoid/nova,salv-orlando/MyRepo,Yuriy-Leonov/nova,jeffrey4l/nova,NewpTone/stacklab-nova,tangfeixiong/nova,zzicewind/nova,houshengbo/nova_vmware_compute_driver,yosshy/nova,BeyondTheClouds/nova,sridevikoushik31/openstack,aristanetworks/arista-ovs-nova,maheshp/novatest,OpenAcademy-OpenStack/nova-scheduler,mandeepdhami/nova,phenoxim/nova,paulmathews/nova,usc-isi/nova,TwinkleChawla/nova,mikalstill/nova,Metaswitch/calico-nova,ntt-sic/nova,KarimAllah/nova,houshengbo/nova_vmware_compute_driver,projectcalico/calico-nova,CloudServer/nova,savi-dev/nova,usc-isi/extra-specs,tanglei528/nova,yatinkumbhare/openstack-nova,tianweizhang/nova,Stavitsky/nova,gooddata/openstack-nova,redhat-openstack/nova,Yusuke1987/openstack_template,sridevikoushik31/nova,yatinkumbhare/openstack-nova,tanglei528/nova,leilihh/nova,dstroppa/openstack-smartos-nova-grizzly,klmitch/nova,mahak/nova,sebrandon1/nova,bigswitch/nova,rajalokan/nova,rrader/nova-docker-plugin,leilihh/nova,raildo/nova,mmnelemane/nova,ewindisch/nova,dstroppa/openstack-smartos-nova-grizzly,gooddata/openstack-nova,Tehsmash/nova,JioCloud/nova_test_latest,petrutlucian94/nova_dev,ted-gould/nova,rahulunair/nova,dims/nova,badock/nova,yosshy/nova,MountainWei/nova,scripnichenko/nova,double12gzh/nova,gspilio/nova,OpenAcademy-OpenStack/nova-scheduler,nikesh-mahalka/nova,shootstar/novatest,savi-dev/nova,scripnichenko/nova,gooddata/openstack-nova,devendermishrajio/nova_test_latest,usc-isi/extra-specs,CCI-MOC/nova,takeshineshiro/nova,usc-isi/nova,NewpTone/stacklab-nova,dims/nova,adelina-t/nova,phenoxim/nova,shail2810/nova,cyx1231st/nova,alvarolopez/nova,alexandrucoman/vbox-nova-driver,barnsnake351/nova,openstack/nov
a,affo/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,affo/nova,maoy/zknova,thomasem/nova,ted-gould/nova,petrutlucian94/nova,viggates/nova,CEG-FYP-OpenStack/scheduler,TieWei/nova,salv-orlando/MyRepo,hanlind/nova,rajalokan/nova,NoBodyCam/TftpPxeBootBareMetal,ewindisch/nova,BeyondTheClouds/nova,devendermishrajio/nova,paulmathews/nova,tudorvio/nova
|
python
|
## Code Before:
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
## Instruction:
Drop FK before dropping instance_id column.
## Code After:
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
#
# Tables to alter
#
#
instance_id = Column('instance_id', Integer())
instance_uuid = Column('instance_uuid', String(255))
def upgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
def downgrade(migrate_engine):
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.c.instance_uuid.drop()
migrations.create_column(instance_id)
|
# ... existing code ...
from sqlalchemy import Column, Integer, String, MetaData, Table
meta = MetaData()
# ... modified code ...
meta.bind = migrate_engine
migrations = Table('migrations', meta, autoload=True)
migrations.create_column(instance_uuid)
if migrate_engine.name == "mysql":
migrate_engine.execute("ALTER TABLE migrations DROP FOREIGN KEY " \
"`migrations_ibfk_1`;")
migrations.c.instance_id.drop()
# ... rest of the code ...
|
7726e51f2e3bb028700e5fc61779f6edc53cee36
|
scripts/init_tree.py
|
scripts/init_tree.py
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
Update to copy new scripts
|
Update to copy new scripts
|
Python
|
bsd-3-clause
|
lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123
|
python
|
## Code Before:
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
## Instruction:
Update to copy new scripts
## Code After:
import os
import shutil
def main():
cwd = os.getcwd()
if not cwd.endswith(os.path.join('FRENSIE', 'scripts')):
print 'This script must be run in \"FRENSIE/scipts\"'
print 'Your CWD is', cwd
return 1
os.chdir('../../')
os.mkdir('frensie_build_tree')
#os.renames('FRENSIE', 'frensie_build_tree/FRENSIE')
os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE')
os.chdir('frensie_build_tree')
os.symlink('FRENSIE', 'src')
os.mkdir('deps')
os.mkdir('deps/install')
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
cd to frensie_build_tree/deps and run make all.
To once all dependecies exist in frensie_build_tree/deps/install
cd to frensie_build_tree/build and run bash frensie.sh
"""
if __name__ == '__main__':
main()
|
// ... existing code ...
os.mkdir('deps/tars')
os.mkdir('build')
shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile')
shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh')
shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh')
#shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh')
print """
To build dependencies move necessary tars to deps/tars.
// ... rest of the code ...
|
20cf8ae478c2712d4c211b49868e334357f05356
|
src/include/storage/copydir.h
|
src/include/storage/copydir.h
|
/*-------------------------------------------------------------------------
*
* copydir.h
* Header for src/port/copydir.c compatibility functions.
*
* Portions Copyright (c) 1996-2010, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* src/include/storage/copydir.h
*
*-------------------------------------------------------------------------
*/
#ifndef COPYDIR_H
#define COPYDIR_H
extern void copydir(char *fromdir, char *todir, bool recurse);
#endif /* COPYDIR_H */
|
/*-------------------------------------------------------------------------
*
* copydir.h
* Copy a directory.
*
* Portions Copyright (c) 1996-2010, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* src/include/storage/copydir.h
*
*-------------------------------------------------------------------------
*/
#ifndef COPYDIR_H
#define COPYDIR_H
extern void copydir(char *fromdir, char *todir, bool recurse);
#endif /* COPYDIR_H */
|
Fix copy-and-pasteo a little more completely.
|
Fix copy-and-pasteo a little more completely.
copydir.c is no longer in src/port
|
C
|
apache-2.0
|
50wu/gpdb,50wu/gpdb,arcivanov/postgres-xl,pavanvd/postgres-xl,Postgres-XL/Postgres-XL,pavanvd/postgres-xl,techdragon/Postgres-XL,oberstet/postgres-xl,adam8157/gpdb,lisakowen/gpdb,tpostgres-projects/tPostgres,xinzweb/gpdb,ashwinstar/gpdb,pavanvd/postgres-xl,techdragon/Postgres-XL,postmind-net/postgres-xl,tpostgres-projects/tPostgres,ovr/postgres-xl,snaga/postgres-xl,ashwinstar/gpdb,snaga/postgres-xl,tpostgres-projects/tPostgres,lisakowen/gpdb,greenplum-db/gpdb,jmcatamney/gpdb,snaga/postgres-xl,kmjungersen/PostgresXL,postmind-net/postgres-xl,ashwinstar/gpdb,pavanvd/postgres-xl,oberstet/postgres-xl,yazun/postgres-xl,greenplum-db/gpdb,techdragon/Postgres-XL,snaga/postgres-xl,oberstet/postgres-xl,ashwinstar/gpdb,ovr/postgres-xl,xinzweb/gpdb,postmind-net/postgres-xl,lisakowen/gpdb,adam8157/gpdb,kmjungersen/PostgresXL,adam8157/gpdb,jmcatamney/gpdb,yazun/postgres-xl,jmcatamney/gpdb,jmcatamney/gpdb,Postgres-XL/Postgres-XL,postmind-net/postgres-xl,lisakowen/gpdb,Postgres-XL/Postgres-XL,kmjungersen/PostgresXL,greenplum-db/gpdb,adam8157/gpdb,yazun/postgres-xl,xinzweb/gpdb,lisakowen/gpdb,oberstet/postgres-xl,xinzweb/gpdb,greenplum-db/gpdb,50wu/gpdb,ovr/postgres-xl,greenplum-db/gpdb,greenplum-db/gpdb,pavanvd/postgres-xl,lisakowen/gpdb,oberstet/postgres-xl,zeroae/postgres-xl,kmjungersen/PostgresXL,adam8157/gpdb,arcivanov/postgres-xl,techdragon/Postgres-XL,zeroae/postgres-xl,arcivanov/postgres-xl,jmcatamney/gpdb,50wu/gpdb,50wu/gpdb,ashwinstar/gpdb,jmcatamney/gpdb,jmcatamney/gpdb,zeroae/postgres-xl,adam8157/gpdb,snaga/postgres-xl,lisakowen/gpdb,tpostgres-projects/tPostgres,ovr/postgres-xl,greenplum-db/gpdb,Postgres-XL/Postgres-XL,zeroae/postgres-xl,arcivanov/postgres-xl,xinzweb/gpdb,arcivanov/postgres-xl,xinzweb/gpdb,techdragon/Postgres-XL,jmcatamney/gpdb,arcivanov/postgres-xl,kmjungersen/PostgresXL,ovr/postgres-xl,tpostgres-projects/tPostgres,greenplum-db/gpdb,zeroae/postgres-xl,postmind-net/postgres-xl,yazun/postgres-xl,ashwinstar/gpdb,50wu/gpdb,xinzweb/gpdb,xinzweb/gpdb,Postgres-
XL/Postgres-XL,adam8157/gpdb,yazun/postgres-xl,50wu/gpdb,ashwinstar/gpdb,lisakowen/gpdb,adam8157/gpdb,50wu/gpdb,ashwinstar/gpdb
|
c
|
## Code Before:
/*-------------------------------------------------------------------------
*
* copydir.h
* Header for src/port/copydir.c compatibility functions.
*
* Portions Copyright (c) 1996-2010, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* src/include/storage/copydir.h
*
*-------------------------------------------------------------------------
*/
#ifndef COPYDIR_H
#define COPYDIR_H
extern void copydir(char *fromdir, char *todir, bool recurse);
#endif /* COPYDIR_H */
## Instruction:
Fix copy-and-pasteo a little more completely.
copydir.c is no longer in src/port
## Code After:
/*-------------------------------------------------------------------------
*
* copydir.h
* Copy a directory.
*
* Portions Copyright (c) 1996-2010, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* src/include/storage/copydir.h
*
*-------------------------------------------------------------------------
*/
#ifndef COPYDIR_H
#define COPYDIR_H
extern void copydir(char *fromdir, char *todir, bool recurse);
#endif /* COPYDIR_H */
|
# ... existing code ...
/*-------------------------------------------------------------------------
*
* copydir.h
* Copy a directory.
*
* Portions Copyright (c) 1996-2010, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
# ... rest of the code ...
|
67ab4b7374d739719700f84f0f5726f1b0c476d8
|
cybox/test/objects/mutex_test.py
|
cybox/test/objects/mutex_test.py
|
import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
|
import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
|
Fix some more unittest assert methods for Python 2.6
|
Fix some more unittest assert methods for Python 2.6
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
python
|
## Code Before:
import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
## Instruction:
Fix some more unittest assert methods for Python 2.6
## Code After:
import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
|
...
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
...
|
b95bab6acacffc3b59e4d5a57d06f21159742044
|
setup.py
|
setup.py
|
'''
Python wrapper for the OpenAQ API
Written originally by David H Hagan
December 2015
'''
__version__ = '1.1.0'
try:
from setuptools import setup
except:
from distutils.core import setup
setup(
name = 'py-openaq',
version = __version__,
description = 'Python wrapper for the OpenAQ API',
keywords = ['OpenAQ', 'MIT', 'Air Quality'],
author = 'David H Hagan',
author_email = '[email protected]',
url = 'https://github.com/dhhagan/py-openaq',
license = 'MIT',
packages = ['openaq'],
test_suite = 'tests',
classifiers = [
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Atmospheric Science',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
'''
Python wrapper for the OpenAQ API
Written originally by David H Hagan
December 2015
'''
__version__ = '1.1.0'
try:
from setuptools import setup
except:
from distutils.core import setup
setup(
name = 'py-openaq',
version = __version__,
description = 'Python wrapper for the OpenAQ API',
keywords = ['OpenAQ', 'MIT', 'Air Quality'],
author = 'David H Hagan',
author_email = '[email protected]',
url = 'https://github.com/dhhagan/py-openaq',
license = 'MIT',
packages = ['openaq'],
install_requires = ['requests'],
test_suite = 'tests',
classifiers = [
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Atmospheric Science',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Add requests package as dependency
|
Add requests package as dependency
The package depends on the requests package, which is not explicitly mentioned in the setup file. Path adds this as requirement to the setup file.
|
Python
|
mit
|
dhhagan/py-openaq,dhhagan/py-openaq
|
python
|
## Code Before:
'''
Python wrapper for the OpenAQ API
Written originally by David H Hagan
December 2015
'''
__version__ = '1.1.0'
try:
from setuptools import setup
except:
from distutils.core import setup
setup(
name = 'py-openaq',
version = __version__,
description = 'Python wrapper for the OpenAQ API',
keywords = ['OpenAQ', 'MIT', 'Air Quality'],
author = 'David H Hagan',
author_email = '[email protected]',
url = 'https://github.com/dhhagan/py-openaq',
license = 'MIT',
packages = ['openaq'],
test_suite = 'tests',
classifiers = [
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Atmospheric Science',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
## Instruction:
Add requests package as dependency
The package depends on the requests package, which is not explicitly mentioned in the setup file. Path adds this as requirement to the setup file.
## Code After:
'''
Python wrapper for the OpenAQ API
Written originally by David H Hagan
December 2015
'''
__version__ = '1.1.0'
try:
from setuptools import setup
except:
from distutils.core import setup
setup(
name = 'py-openaq',
version = __version__,
description = 'Python wrapper for the OpenAQ API',
keywords = ['OpenAQ', 'MIT', 'Air Quality'],
author = 'David H Hagan',
author_email = '[email protected]',
url = 'https://github.com/dhhagan/py-openaq',
license = 'MIT',
packages = ['openaq'],
install_requires = ['requests'],
test_suite = 'tests',
classifiers = [
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Atmospheric Science',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
// ... existing code ...
url = 'https://github.com/dhhagan/py-openaq',
license = 'MIT',
packages = ['openaq'],
install_requires = ['requests'],
test_suite = 'tests',
classifiers = [
'Development Status :: 3 - Alpha',
// ... rest of the code ...
|
8cc1385ae485bb02c3ae28df5e6de8dfe60d152c
|
transformers/src/main/java/com/pushtechnology/diffusion/transform/transformer/SafeTransformer.java
|
transformers/src/main/java/com/pushtechnology/diffusion/transform/transformer/SafeTransformer.java
|
/*******************************************************************************
* Copyright (C) 2016 Push Technology Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.pushtechnology.diffusion.transform.transformer;
/**
* A transformer. Converts values of one type into values of a different
* type. It will not throw an exception.
*
* @param <S> the type of the source values
* @param <T> the type of the transformed values
* @author Push Technology Limited
* @deprecated since 2.0.0 in favour of {@link java.util.function.Function}
*/
@SuppressWarnings("deprecation")
@Deprecated
public interface SafeTransformer<S, T> extends Transformer<S, T> {
@Override
T transform(S value);
}
|
/*******************************************************************************
* Copyright (C) 2016 Push Technology Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.pushtechnology.diffusion.transform.transformer;
import java.util.function.Function;
/**
* A transformer. Converts values of one type into values of a different
* type. It will not throw an exception.
*
* @param <S> the type of the source values
* @param <T> the type of the transformed values
* @author Push Technology Limited
* @deprecated since 2.0.0 in favour of {@link java.util.function.Function}
*/
@SuppressWarnings("deprecation")
@Deprecated
public interface SafeTransformer<S, T> extends Transformer<S, T> {
@Override
T transform(S value);
/**
* Convert the transformer to a function.
*
* @return the transformer as a function
*/
default Function<S, T> asFunction() {
return this::transform;
}
}
|
Add a method to convert a safe transformer to a function.
|
Add a method to convert a safe transformer to a function.
|
Java
|
apache-2.0
|
pushtechnology/diffusion-transform
|
java
|
## Code Before:
/*******************************************************************************
* Copyright (C) 2016 Push Technology Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.pushtechnology.diffusion.transform.transformer;
/**
* A transformer. Converts values of one type into values of a different
* type. It will not throw an exception.
*
* @param <S> the type of the source values
* @param <T> the type of the transformed values
* @author Push Technology Limited
* @deprecated since 2.0.0 in favour of {@link java.util.function.Function}
*/
@SuppressWarnings("deprecation")
@Deprecated
public interface SafeTransformer<S, T> extends Transformer<S, T> {
@Override
T transform(S value);
}
## Instruction:
Add a method to convert a safe transformer to a function.
## Code After:
/*******************************************************************************
* Copyright (C) 2016 Push Technology Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.pushtechnology.diffusion.transform.transformer;
import java.util.function.Function;
/**
* A transformer. Converts values of one type into values of a different
* type. It will not throw an exception.
*
* @param <S> the type of the source values
* @param <T> the type of the transformed values
* @author Push Technology Limited
* @deprecated since 2.0.0 in favour of {@link java.util.function.Function}
*/
@SuppressWarnings("deprecation")
@Deprecated
public interface SafeTransformer<S, T> extends Transformer<S, T> {
@Override
T transform(S value);
/**
* Convert the transformer to a function.
*
* @return the transformer as a function
*/
default Function<S, T> asFunction() {
return this::transform;
}
}
|
...
package com.pushtechnology.diffusion.transform.transformer;
import java.util.function.Function;
/**
* A transformer. Converts values of one type into values of a different
* type. It will not throw an exception.
...
public interface SafeTransformer<S, T> extends Transformer<S, T> {
@Override
T transform(S value);
/**
* Convert the transformer to a function.
*
* @return the transformer as a function
*/
default Function<S, T> asFunction() {
return this::transform;
}
}
...
|
33b3a55ff8277e199bea9a2dea0cbc6d4e25a3b7
|
Alkitab/src/yuku/alkitab/base/widget/VerseInlineLinkSpan.java
|
Alkitab/src/yuku/alkitab/base/widget/VerseInlineLinkSpan.java
|
package yuku.alkitab.base.widget;
import android.text.style.ClickableSpan;
import android.view.View;
public abstract class VerseInlineLinkSpan extends ClickableSpan {
public interface Factory {
VerseInlineLinkSpan create(final Type type, final int arif);
}
private final Type type;
private final int arif;
private final Object source;
public enum Type {
footnote,
xref,
}
public VerseInlineLinkSpan(final Type type, final int arif, final Object source) {
this.type = type;
this.arif = arif;
this.source = source;
}
@Override
public final void onClick(final View widget) {
onClick(type, arif, source);
}
public abstract void onClick(final Type type, final int arif, final Object source);
}
|
package yuku.alkitab.base.widget;
import android.text.TextPaint;
import android.text.style.ClickableSpan;
import android.view.View;
public abstract class VerseInlineLinkSpan extends ClickableSpan {
public interface Factory {
VerseInlineLinkSpan create(final Type type, final int arif);
}
private final Type type;
private final int arif;
private final Object source;
public enum Type {
footnote,
xref,
}
public VerseInlineLinkSpan(final Type type, final int arif, final Object source) {
this.type = type;
this.arif = arif;
this.source = source;
}
@Override
public final void onClick(final View widget) {
onClick(type, arif, source);
}
public abstract void onClick(final Type type, final int arif, final Object source);
@Override
public void updateDrawState(final TextPaint ds) {
// don't call super to prevent link underline and link coloring
// NOP
}
}
|
Remove coloring and underlining of verse inline links.
|
Remove coloring and underlining of verse inline links.
|
Java
|
apache-2.0
|
infojulio/androidbible,yukuku/androidbible,Jaden-J/androidbible,yukuku/androidbible,yukuku/androidbible,infojulio/androidbible,Jaden-J/androidbible,arnotixe/androidbible,infojulio/androidbible,yukuku/androidbible,yukuku/androidbible,Jaden-J/androidbible,arnotixe/androidbible,arnotixe/androidbible,infojulio/androidbible,arnotixe/androidbible,infojulio/androidbible,infojulio/androidbible,yukuku/androidbible,yukuku/androidbible,Jaden-J/androidbible,arnotixe/androidbible,yukuku/androidbible,infojulio/androidbible,Jaden-J/androidbible,Jaden-J/androidbible,infojulio/androidbible,Jaden-J/androidbible
|
java
|
## Code Before:
package yuku.alkitab.base.widget;
import android.text.style.ClickableSpan;
import android.view.View;
public abstract class VerseInlineLinkSpan extends ClickableSpan {
public interface Factory {
VerseInlineLinkSpan create(final Type type, final int arif);
}
private final Type type;
private final int arif;
private final Object source;
public enum Type {
footnote,
xref,
}
public VerseInlineLinkSpan(final Type type, final int arif, final Object source) {
this.type = type;
this.arif = arif;
this.source = source;
}
@Override
public final void onClick(final View widget) {
onClick(type, arif, source);
}
public abstract void onClick(final Type type, final int arif, final Object source);
}
## Instruction:
Remove coloring and underlining of verse inline links.
## Code After:
package yuku.alkitab.base.widget;
import android.text.TextPaint;
import android.text.style.ClickableSpan;
import android.view.View;
public abstract class VerseInlineLinkSpan extends ClickableSpan {
public interface Factory {
VerseInlineLinkSpan create(final Type type, final int arif);
}
private final Type type;
private final int arif;
private final Object source;
public enum Type {
footnote,
xref,
}
public VerseInlineLinkSpan(final Type type, final int arif, final Object source) {
this.type = type;
this.arif = arif;
this.source = source;
}
@Override
public final void onClick(final View widget) {
onClick(type, arif, source);
}
public abstract void onClick(final Type type, final int arif, final Object source);
@Override
public void updateDrawState(final TextPaint ds) {
// don't call super to prevent link underline and link coloring
// NOP
}
}
|
# ... existing code ...
package yuku.alkitab.base.widget;
import android.text.TextPaint;
import android.text.style.ClickableSpan;
import android.view.View;
# ... modified code ...
}
public abstract void onClick(final Type type, final int arif, final Object source);
@Override
public void updateDrawState(final TextPaint ds) {
// don't call super to prevent link underline and link coloring
// NOP
}
}
# ... rest of the code ...
|
95f89ab590555bd4cc6c92b6b24883a27b323d2a
|
tests/test_methods.py
|
tests/test_methods.py
|
from apiritif import http
from unittest import TestCase
class TestRequests(TestCase):
def test_get(self):
http.get('http://blazedemo.com/?tag=get')
def test_post(self):
http.post('http://blazedemo.com/?tag=post')
def test_put(self):
http.put('http://blazedemo.com/?tag=put')
def test_patch(self):
http.patch('http://blazedemo.com/?tag=patch')
def test_head(self):
http.head('http://blazedemo.com/?tag=head')
def test_delete(self):
http.delete('http://blazedemo.com/?tag=delete')
def test_options(self):
http.options('http://blazedemo.com/echo.php?echo=options')
def test_connect(self):
target = http.target('http://blazedemo.com/', auto_assert_ok=False)
target.connect('/echo.php?echo=connect')
|
from apiritif import http
from unittest import TestCase
class TestHTTPMethods(TestCase):
def test_get(self):
http.get('http://blazedemo.com/?tag=get')
def test_post(self):
http.post('http://blazedemo.com/?tag=post')
def test_put(self):
http.put('http://blazedemo.com/?tag=put')
def test_patch(self):
http.patch('http://blazedemo.com/?tag=patch')
def test_head(self):
http.head('http://blazedemo.com/?tag=head')
def test_delete(self):
http.delete('http://blazedemo.com/?tag=delete')
def test_options(self):
http.options('http://blazedemo.com/echo.php?echo=options')
class TestTargetMethods(TestCase):
def setUp(self):
self.target = http.target('http://blazedemo.com', auto_assert_ok=False)
def test_get(self):
self.target.get('/echo.php?echo=get').assert_ok()
def test_post(self):
self.target.post('/echo.php?echo=post').assert_ok()
def test_put(self):
self.target.put('/echo.php?echo=put').assert_ok()
def test_patch(self):
self.target.patch('/echo.php?echo=patch').assert_ok()
def test_delete(self):
self.target.delete('/echo.php?echo=delete').assert_ok()
def test_head(self):
self.target.head('/echo.php?echo=head').assert_ok()
def test_options(self):
self.target.options('/echo.php?echo=options').assert_ok()
def test_connect(self):
self.target.connect('/echo.php?echo=connect')
|
Add a lot more tests
|
Add a lot more tests
|
Python
|
apache-2.0
|
Blazemeter/apiritif,Blazemeter/apiritif
|
python
|
## Code Before:
from apiritif import http
from unittest import TestCase
class TestRequests(TestCase):
def test_get(self):
http.get('http://blazedemo.com/?tag=get')
def test_post(self):
http.post('http://blazedemo.com/?tag=post')
def test_put(self):
http.put('http://blazedemo.com/?tag=put')
def test_patch(self):
http.patch('http://blazedemo.com/?tag=patch')
def test_head(self):
http.head('http://blazedemo.com/?tag=head')
def test_delete(self):
http.delete('http://blazedemo.com/?tag=delete')
def test_options(self):
http.options('http://blazedemo.com/echo.php?echo=options')
def test_connect(self):
target = http.target('http://blazedemo.com/', auto_assert_ok=False)
target.connect('/echo.php?echo=connect')
## Instruction:
Add a lot more tests
## Code After:
from apiritif import http
from unittest import TestCase
class TestHTTPMethods(TestCase):
def test_get(self):
http.get('http://blazedemo.com/?tag=get')
def test_post(self):
http.post('http://blazedemo.com/?tag=post')
def test_put(self):
http.put('http://blazedemo.com/?tag=put')
def test_patch(self):
http.patch('http://blazedemo.com/?tag=patch')
def test_head(self):
http.head('http://blazedemo.com/?tag=head')
def test_delete(self):
http.delete('http://blazedemo.com/?tag=delete')
def test_options(self):
http.options('http://blazedemo.com/echo.php?echo=options')
class TestTargetMethods(TestCase):
def setUp(self):
self.target = http.target('http://blazedemo.com', auto_assert_ok=False)
def test_get(self):
self.target.get('/echo.php?echo=get').assert_ok()
def test_post(self):
self.target.post('/echo.php?echo=post').assert_ok()
def test_put(self):
self.target.put('/echo.php?echo=put').assert_ok()
def test_patch(self):
self.target.patch('/echo.php?echo=patch').assert_ok()
def test_delete(self):
self.target.delete('/echo.php?echo=delete').assert_ok()
def test_head(self):
self.target.head('/echo.php?echo=head').assert_ok()
def test_options(self):
self.target.options('/echo.php?echo=options').assert_ok()
def test_connect(self):
self.target.connect('/echo.php?echo=connect')
|
...
from unittest import TestCase
class TestHTTPMethods(TestCase):
def test_get(self):
http.get('http://blazedemo.com/?tag=get')
...
def test_options(self):
http.options('http://blazedemo.com/echo.php?echo=options')
class TestTargetMethods(TestCase):
def setUp(self):
self.target = http.target('http://blazedemo.com', auto_assert_ok=False)
def test_get(self):
self.target.get('/echo.php?echo=get').assert_ok()
def test_post(self):
self.target.post('/echo.php?echo=post').assert_ok()
def test_put(self):
self.target.put('/echo.php?echo=put').assert_ok()
def test_patch(self):
self.target.patch('/echo.php?echo=patch').assert_ok()
def test_delete(self):
self.target.delete('/echo.php?echo=delete').assert_ok()
def test_head(self):
self.target.head('/echo.php?echo=head').assert_ok()
def test_options(self):
self.target.options('/echo.php?echo=options').assert_ok()
def test_connect(self):
self.target.connect('/echo.php?echo=connect')
...
|
43f67067c470386b6b24080642cc845ec1655f58
|
utils/networking.py
|
utils/networking.py
|
import fcntl
import socket
import struct
from contextlib import contextmanager
@contextmanager
def use_interface(ifname):
"""
:type ifname: str
"""
ip = _ip_address_for_interface(ifname.encode('ascii'))
original_socket = socket.socket
def rebound_socket(*args, **kwargs):
sock = original_socket(*args, **kwargs)
sock.bind((ip, 0))
return sock
socket.socket = rebound_socket
yield
socket.socket = original_socket
def _ip_address_for_interface(ifname):
"""
:type ifname: bytes
:rtype: str
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sock.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
|
import fcntl
import socket
import struct
from contextlib import contextmanager
@contextmanager
def use_interface(ifname):
"""
:type ifname: str
"""
ip = _ip_address_for_interface(ifname)
original_socket = socket.socket
def rebound_socket(*args, **kwargs):
sock = original_socket(*args, **kwargs)
sock.bind((ip, 0))
return sock
socket.socket = rebound_socket
yield
socket.socket = original_socket
def _ip_address_for_interface(ifname):
"""
:type ifname: str
:rtype: str
"""
ifname = ifname.encode('ascii')
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sock.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
|
Make _ip_address_for_interface easier to use
|
Make _ip_address_for_interface easier to use
|
Python
|
apache-2.0
|
OPWEN/opwen-webapp,ascoderu/opwen-webapp,ascoderu/opwen-webapp,OPWEN/opwen-webapp,OPWEN/opwen-webapp,ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver,ascoderu/opwen-webapp
|
python
|
## Code Before:
import fcntl
import socket
import struct
from contextlib import contextmanager
@contextmanager
def use_interface(ifname):
"""
:type ifname: str
"""
ip = _ip_address_for_interface(ifname.encode('ascii'))
original_socket = socket.socket
def rebound_socket(*args, **kwargs):
sock = original_socket(*args, **kwargs)
sock.bind((ip, 0))
return sock
socket.socket = rebound_socket
yield
socket.socket = original_socket
def _ip_address_for_interface(ifname):
"""
:type ifname: bytes
:rtype: str
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sock.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
## Instruction:
Make _ip_address_for_interface easier to use
## Code After:
import fcntl
import socket
import struct
from contextlib import contextmanager
@contextmanager
def use_interface(ifname):
"""
:type ifname: str
"""
ip = _ip_address_for_interface(ifname)
original_socket = socket.socket
def rebound_socket(*args, **kwargs):
sock = original_socket(*args, **kwargs)
sock.bind((ip, 0))
return sock
socket.socket = rebound_socket
yield
socket.socket = original_socket
def _ip_address_for_interface(ifname):
"""
:type ifname: str
:rtype: str
"""
ifname = ifname.encode('ascii')
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sock.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
|
...
:type ifname: str
"""
ip = _ip_address_for_interface(ifname)
original_socket = socket.socket
def rebound_socket(*args, **kwargs):
...
def _ip_address_for_interface(ifname):
"""
:type ifname: str
:rtype: str
"""
ifname = ifname.encode('ascii')
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
sock.fileno(),
...
|
ee4c8b806ecf0ada51916fe63f9da9e81c03850d
|
django_react_templatetags/tests/demosite/urls.py
|
django_react_templatetags/tests/demosite/urls.py
|
from django.urls import path
from django_react_templatetags.tests.demosite import views
urlpatterns = [
path(
'static-react-view',
views.StaticReactView.as_view(),
name='static_react_view',
),
]
|
from django.conf.urls import url
from django_react_templatetags.tests.demosite import views
urlpatterns = [
url(
'static-react-view',
views.StaticReactView.as_view(),
name='static_react_view',
),
]
|
Use url instead of path (to keep django 1 compat)
|
Use url instead of path (to keep django 1 compat)
|
Python
|
mit
|
Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags
|
python
|
## Code Before:
from django.urls import path
from django_react_templatetags.tests.demosite import views
urlpatterns = [
path(
'static-react-view',
views.StaticReactView.as_view(),
name='static_react_view',
),
]
## Instruction:
Use url instead of path (to keep django 1 compat)
## Code After:
from django.conf.urls import url
from django_react_templatetags.tests.demosite import views
urlpatterns = [
url(
'static-react-view',
views.StaticReactView.as_view(),
name='static_react_view',
),
]
|
// ... existing code ...
from django.conf.urls import url
from django_react_templatetags.tests.demosite import views
urlpatterns = [
url(
'static-react-view',
views.StaticReactView.as_view(),
name='static_react_view',
// ... rest of the code ...
|
dc755e07516e1cbbcd01f01e8be59abf8f1a6329
|
humfrey/update/management/commands/update_dataset.py
|
humfrey/update/management/commands/update_dataset.py
|
import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': 'manual',
'queued_at': datetime.datetime.now(),
})))
|
import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
trigger = args[1] if len(args) > 1 else 'manual'
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': trigger,
'queued_at': datetime.datetime.now(),
})))
if __name__ == '__main__':
import sys
Command().handle(*sys.argv[1:])
|
Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
|
Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
|
Python
|
bsd-3-clause
|
ox-it/humfrey,ox-it/humfrey,ox-it/humfrey
|
python
|
## Code Before:
import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': 'manual',
'queued_at': datetime.datetime.now(),
})))
## Instruction:
Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
## Code After:
import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
trigger = args[1] if len(args) > 1 else 'manual'
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': trigger,
'queued_at': datetime.datetime.now(),
})))
if __name__ == '__main__':
import sys
Command().handle(*sys.argv[1:])
|
# ... existing code ...
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
trigger = args[1] if len(args) > 1 else 'manual'
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
# ... modified code ...
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': trigger,
'queued_at': datetime.datetime.now(),
})))
if __name__ == '__main__':
import sys
Command().handle(*sys.argv[1:])
# ... rest of the code ...
|
5cd9499fcc0c1f9b48216aeca11a7adcd8995a47
|
netmiko/mrv/mrv_ssh.py
|
netmiko/mrv/mrv_ssh.py
|
"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
|
"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
|
Fix for MRV failing to enter enable mode
|
Fix for MRV failing to enter enable mode
|
Python
|
mit
|
ktbyers/netmiko,ktbyers/netmiko
|
python
|
## Code Before:
"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.enable()
self.set_base_prompt()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
## Instruction:
Fix for MRV failing to enter enable mode
## Code After:
"""MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
"""MRV Communications Driver (OptiSwitch)."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
"""Enable mode on MRV uses no password."""
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
if not self.check_enable_mode():
msg = "Failed to enter enable mode. Please ensure you pass " \
"the 'secret' argument to ConnectHandler."
raise ValueError(msg)
return output
def save_config(self, cmd='save config flash', confirm=False):
"""Saves configuration."""
return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
|
...
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r'[>#]')
self.set_base_prompt()
self.enable()
self.disable_paging(command="no cli-paging")
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.set_base_prompt()
self.clear_buffer()
def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
...
|
ef8f03fe761ede729d5e1ee93f70ae6c40994bad
|
ir/be/ia32/ia32_optimize.h
|
ir/be/ia32/ia32_optimize.h
|
/*
* This file is part of libFirm.
* Copyright (C) 2012 University of Karlsruhe.
*/
/**
* @file
* @brief Implements several optimizations for IA32.
* @author Christian Wuerdig
*/
#ifndef FIRM_BE_IA32_IA32_OPTIMIZE_H
#define FIRM_BE_IA32_IA32_OPTIMIZE_H
#include "firm_types.h"
/**
* Prepares irg for codegeneration.
*/
void ia32_pre_transform_phase(ir_graph *irg);
/**
* Performs conv and address mode optimizations.
* @param cg The ia32 codegenerator object
*/
void ia32_optimize_graph(ir_graph *irg);
/**
* Performs Peephole Optimizations an a graph.
*
* @param irg the graph
* @param cg the code generator object
*/
void ia32_peephole_optimization(ir_graph *irg);
/** Initialize the ia32 address mode optimizer. */
void ia32_init_optimize(void);
#endif
|
/*
* This file is part of libFirm.
* Copyright (C) 2012 University of Karlsruhe.
*/
/**
* @file
* @brief Implements several optimizations for IA32.
* @author Christian Wuerdig
*/
#ifndef FIRM_BE_IA32_IA32_OPTIMIZE_H
#define FIRM_BE_IA32_IA32_OPTIMIZE_H
#include "firm_types.h"
/**
* Performs conv and address mode optimizations.
* @param cg The ia32 codegenerator object
*/
void ia32_optimize_graph(ir_graph *irg);
/**
* Performs Peephole Optimizations an a graph.
*
* @param irg the graph
* @param cg the code generator object
*/
void ia32_peephole_optimization(ir_graph *irg);
/** Initialize the ia32 address mode optimizer. */
void ia32_init_optimize(void);
#endif
|
Remove stale declaration of 'ia32_pre_transform_phase()'.
|
ia32: Remove stale declaration of 'ia32_pre_transform_phase()'.
This function was deleted in 2007!
|
C
|
lgpl-2.1
|
jonashaag/libfirm,libfirm/libfirm,MatzeB/libfirm,libfirm/libfirm,jonashaag/libfirm,jonashaag/libfirm,jonashaag/libfirm,MatzeB/libfirm,jonashaag/libfirm,MatzeB/libfirm,libfirm/libfirm,jonashaag/libfirm,MatzeB/libfirm,libfirm/libfirm,MatzeB/libfirm,libfirm/libfirm,jonashaag/libfirm,MatzeB/libfirm,MatzeB/libfirm
|
c
|
## Code Before:
/*
* This file is part of libFirm.
* Copyright (C) 2012 University of Karlsruhe.
*/
/**
* @file
* @brief Implements several optimizations for IA32.
* @author Christian Wuerdig
*/
#ifndef FIRM_BE_IA32_IA32_OPTIMIZE_H
#define FIRM_BE_IA32_IA32_OPTIMIZE_H
#include "firm_types.h"
/**
* Prepares irg for codegeneration.
*/
void ia32_pre_transform_phase(ir_graph *irg);
/**
* Performs conv and address mode optimizations.
* @param cg The ia32 codegenerator object
*/
void ia32_optimize_graph(ir_graph *irg);
/**
* Performs Peephole Optimizations an a graph.
*
* @param irg the graph
* @param cg the code generator object
*/
void ia32_peephole_optimization(ir_graph *irg);
/** Initialize the ia32 address mode optimizer. */
void ia32_init_optimize(void);
#endif
## Instruction:
ia32: Remove stale declaration of 'ia32_pre_transform_phase()'.
This function was deleted in 2007!
## Code After:
/*
* This file is part of libFirm.
* Copyright (C) 2012 University of Karlsruhe.
*/
/**
* @file
* @brief Implements several optimizations for IA32.
* @author Christian Wuerdig
*/
#ifndef FIRM_BE_IA32_IA32_OPTIMIZE_H
#define FIRM_BE_IA32_IA32_OPTIMIZE_H
#include "firm_types.h"
/**
* Performs conv and address mode optimizations.
* @param cg The ia32 codegenerator object
*/
void ia32_optimize_graph(ir_graph *irg);
/**
* Performs Peephole Optimizations an a graph.
*
* @param irg the graph
* @param cg the code generator object
*/
void ia32_peephole_optimization(ir_graph *irg);
/** Initialize the ia32 address mode optimizer. */
void ia32_init_optimize(void);
#endif
|
...
#define FIRM_BE_IA32_IA32_OPTIMIZE_H
#include "firm_types.h"
/**
* Performs conv and address mode optimizations.
...
|
a89e7f9f625427d558300eb5e5cbc2881cdcc207
|
get_a_job/__init__.py
|
get_a_job/__init__.py
|
from flask import Flask
from flask.ext.restful import Api
from .models import db
from .api import configure_api
def create_app(object_name):
app = Flask(object_name)
app.config.from_object(object_name)
db.init_app(app)
configure_api(app)
return app
|
from flask import Flask
from flask.ext.restful import Api
from .models import db
from .api import configure_api
def create_app(object_name, **kwargs):
app = Flask(object_name)
app.config.from_object(object_name)
app.config.update(kwargs)
db.init_app(app)
configure_api(app)
return app
|
Add optional configuration customization of app.
|
Add optional configuration customization of app.
|
Python
|
mit
|
smoynes/get_a_job
|
python
|
## Code Before:
from flask import Flask
from flask.ext.restful import Api
from .models import db
from .api import configure_api
def create_app(object_name):
app = Flask(object_name)
app.config.from_object(object_name)
db.init_app(app)
configure_api(app)
return app
## Instruction:
Add optional configuration customization of app.
## Code After:
from flask import Flask
from flask.ext.restful import Api
from .models import db
from .api import configure_api
def create_app(object_name, **kwargs):
app = Flask(object_name)
app.config.from_object(object_name)
app.config.update(kwargs)
db.init_app(app)
configure_api(app)
return app
|
// ... existing code ...
from .models import db
from .api import configure_api
def create_app(object_name, **kwargs):
app = Flask(object_name)
app.config.from_object(object_name)
app.config.update(kwargs)
db.init_app(app)
configure_api(app)
// ... rest of the code ...
|
126d79011221da6692d70f9a9aba1f335155ab58
|
us_ignite/apps/management/commands/app_load_fixtures.py
|
us_ignite/apps/management/commands/app_load_fixtures.py
|
from django.core.management.base import BaseCommand
from us_ignite.apps.models import Feature, Domain
FEATURE_LIST = (
'SDN',
'OpenFlow',
'Ultra fast',
'Speed',
'Low-latency',
'Local cloud / edge computing',
)
DOMAIN_LIST = (
'Healthcare',
'Education & Workforce',
'Energy',
'Transportation',
'Entrepreneurship',
'Advanced Manufacturing',
'Public Safety',
'General / Platform / Other',
)
class Command(BaseCommand):
def handle(self, *args, **options):
for feature_name in FEATURE_LIST:
feature, is_new = Feature.objects.get_or_create(name=feature_name)
if is_new:
print "Imported feature: %s" % feature_name
for domain_name in DOMAIN_LIST:
domain, is_new = Domain.objects.get_or_create(name=domain_name)
if is_new:
print "Imported domain: %s" % domain_name
print "Done!"
|
from django.core.management.base import BaseCommand
from us_ignite.apps.models import Feature, Domain
FEATURE_LIST = (
'SDN',
'OpenFlow',
'Ultra fast',
'Speed',
'Low-latency',
'Local cloud / edge computing',
'Advanced wireless',
'Ultra-fast/Gigabit to end-user',
'GENI/US Ignite Rack',
'Layer 2',
)
DOMAIN_LIST = (
'Healthcare',
'Education & Workforce',
'Energy',
'Transportation',
'Advanced Manufacturing',
'Public Safety',
'General / Platform / Other',
)
class Command(BaseCommand):
def handle(self, *args, **options):
for feature_name in FEATURE_LIST:
feature, is_new = Feature.objects.get_or_create(name=feature_name)
if is_new:
print "Imported feature: %s" % feature_name
for domain_name in DOMAIN_LIST:
domain, is_new = Domain.objects.get_or_create(name=domain_name)
if is_new:
print "Imported domain: %s" % domain_name
print "Done!"
|
Update app domain initial fixtures.
|
Update app domain initial fixtures.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
python
|
## Code Before:
from django.core.management.base import BaseCommand
from us_ignite.apps.models import Feature, Domain
FEATURE_LIST = (
'SDN',
'OpenFlow',
'Ultra fast',
'Speed',
'Low-latency',
'Local cloud / edge computing',
)
DOMAIN_LIST = (
'Healthcare',
'Education & Workforce',
'Energy',
'Transportation',
'Entrepreneurship',
'Advanced Manufacturing',
'Public Safety',
'General / Platform / Other',
)
class Command(BaseCommand):
def handle(self, *args, **options):
for feature_name in FEATURE_LIST:
feature, is_new = Feature.objects.get_or_create(name=feature_name)
if is_new:
print "Imported feature: %s" % feature_name
for domain_name in DOMAIN_LIST:
domain, is_new = Domain.objects.get_or_create(name=domain_name)
if is_new:
print "Imported domain: %s" % domain_name
print "Done!"
## Instruction:
Update app domain initial fixtures.
## Code After:
from django.core.management.base import BaseCommand
from us_ignite.apps.models import Feature, Domain
FEATURE_LIST = (
'SDN',
'OpenFlow',
'Ultra fast',
'Speed',
'Low-latency',
'Local cloud / edge computing',
'Advanced wireless',
'Ultra-fast/Gigabit to end-user',
'GENI/US Ignite Rack',
'Layer 2',
)
DOMAIN_LIST = (
'Healthcare',
'Education & Workforce',
'Energy',
'Transportation',
'Advanced Manufacturing',
'Public Safety',
'General / Platform / Other',
)
class Command(BaseCommand):
def handle(self, *args, **options):
for feature_name in FEATURE_LIST:
feature, is_new = Feature.objects.get_or_create(name=feature_name)
if is_new:
print "Imported feature: %s" % feature_name
for domain_name in DOMAIN_LIST:
domain, is_new = Domain.objects.get_or_create(name=domain_name)
if is_new:
print "Imported domain: %s" % domain_name
print "Done!"
|
// ... existing code ...
'Speed',
'Low-latency',
'Local cloud / edge computing',
'Advanced wireless',
'Ultra-fast/Gigabit to end-user',
'GENI/US Ignite Rack',
'Layer 2',
)
DOMAIN_LIST = (
// ... modified code ...
'Education & Workforce',
'Energy',
'Transportation',
'Advanced Manufacturing',
'Public Safety',
'General / Platform / Other',
// ... rest of the code ...
|
28a6dcb5481b348e3224182aa16d892bc3cce498
|
src/main/java/bio/terra/cli/command/Status.java
|
src/main/java/bio/terra/cli/command/Status.java
|
package bio.terra.cli.command;
import bio.terra.cli.context.GlobalContext;
import bio.terra.cli.context.WorkspaceContext;
import java.nio.file.FileAlreadyExistsException;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
/** This class corresponds to the second-level "terra status" command. */
@Command(name = "status", description = "Print details about the current workspace.")
public class Status implements Callable<Integer> {
private static final Logger logger = LoggerFactory.getLogger(Status.class);
@Override
public Integer call() throws FileAlreadyExistsException {
GlobalContext globalContext = GlobalContext.readFromFile();
WorkspaceContext workspaceContext = WorkspaceContext.readFromFile();
System.out.println("Terra server: " + globalContext.server.name);
// check if current workspace is defined
if (workspaceContext.isEmpty()) {
System.out.println("There is no current Terra workspace defined.");
} else {
System.out.println("Terra workspace: " + workspaceContext.getWorkspaceId());
System.out.println("Google project: " + workspaceContext.getGoogleProject());
}
return 0;
}
}
|
package bio.terra.cli.command;
import bio.terra.cli.context.GlobalContext;
import bio.terra.cli.context.WorkspaceContext;
import java.nio.file.FileAlreadyExistsException;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
/** This class corresponds to the second-level "terra status" command. */
@Command(name = "status", description = "Print details about the current workspace.")
public class Status implements Callable<Integer> {
private static final Logger logger = LoggerFactory.getLogger(Status.class);
@Override
public Integer call() {
GlobalContext globalContext = GlobalContext.readFromFile();
WorkspaceContext workspaceContext = WorkspaceContext.readFromFile();
System.out.println("Terra server: " + globalContext.server.name);
// check if current workspace is defined
if (workspaceContext.isEmpty()) {
System.out.println("There is no current Terra workspace defined.");
} else {
System.out.println("Terra workspace: " + workspaceContext.getWorkspaceId());
System.out.println("Google project: " + workspaceContext.getGoogleProject());
}
return 0;
}
}
|
Remove exception used for debugging.
|
Remove exception used for debugging.
|
Java
|
bsd-3-clause
|
DataBiosphere/terra-cli,DataBiosphere/terra-cli
|
java
|
## Code Before:
package bio.terra.cli.command;
import bio.terra.cli.context.GlobalContext;
import bio.terra.cli.context.WorkspaceContext;
import java.nio.file.FileAlreadyExistsException;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
/** This class corresponds to the second-level "terra status" command. */
@Command(name = "status", description = "Print details about the current workspace.")
public class Status implements Callable<Integer> {
private static final Logger logger = LoggerFactory.getLogger(Status.class);
@Override
public Integer call() throws FileAlreadyExistsException {
GlobalContext globalContext = GlobalContext.readFromFile();
WorkspaceContext workspaceContext = WorkspaceContext.readFromFile();
System.out.println("Terra server: " + globalContext.server.name);
// check if current workspace is defined
if (workspaceContext.isEmpty()) {
System.out.println("There is no current Terra workspace defined.");
} else {
System.out.println("Terra workspace: " + workspaceContext.getWorkspaceId());
System.out.println("Google project: " + workspaceContext.getGoogleProject());
}
return 0;
}
}
## Instruction:
Remove exception used for debugging.
## Code After:
package bio.terra.cli.command;
import bio.terra.cli.context.GlobalContext;
import bio.terra.cli.context.WorkspaceContext;
import java.nio.file.FileAlreadyExistsException;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
/** This class corresponds to the second-level "terra status" command. */
@Command(name = "status", description = "Print details about the current workspace.")
public class Status implements Callable<Integer> {
private static final Logger logger = LoggerFactory.getLogger(Status.class);
@Override
public Integer call() {
GlobalContext globalContext = GlobalContext.readFromFile();
WorkspaceContext workspaceContext = WorkspaceContext.readFromFile();
System.out.println("Terra server: " + globalContext.server.name);
// check if current workspace is defined
if (workspaceContext.isEmpty()) {
System.out.println("There is no current Terra workspace defined.");
} else {
System.out.println("Terra workspace: " + workspaceContext.getWorkspaceId());
System.out.println("Google project: " + workspaceContext.getGoogleProject());
}
return 0;
}
}
|
...
private static final Logger logger = LoggerFactory.getLogger(Status.class);
@Override
public Integer call() {
GlobalContext globalContext = GlobalContext.readFromFile();
WorkspaceContext workspaceContext = WorkspaceContext.readFromFile();
...
|
8c01b3536026d56abb42daaf9d300e53e7c6dc18
|
detox/main.py
|
detox/main.py
|
import sys
import py
import detox
from detox.proc import Detox
def parse(args):
from tox.session import prepare
return prepare(args)
def main(args=None):
if args is None:
args = sys.argv[1:]
config = parse(args)
#now = py.std.time.time()
detox = Detox(config)
detox.startloopreport()
retcode = detox.runtestsmulti(config.envlist)
#elapsed = py.std.time.time() - now
#cumulated = detox.toxsession.report.cumulated_time
#detox.toxsession.report.line(
# "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
# cumulated / elapsed, elapsed, cumulated), bold=True)
return retcode
|
import sys
import py
import detox
from detox.proc import Detox
def parse(args):
from tox.session import prepare
return prepare(args)
def main(args=None):
if args is None:
args = sys.argv[1:]
config = parse(args)
#now = py.std.time.time()
detox = Detox(config)
detox.startloopreport()
retcode = detox.runtestsmulti(config.envlist)
#elapsed = py.std.time.time() - now
#cumulated = detox.toxsession.report.cumulated_time
#detox.toxsession.report.line(
# "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
# cumulated / elapsed, elapsed, cumulated), bold=True)
raise SystemExit(retcode)
|
Raise system code on exit from `python -m detox`
|
Raise system code on exit from `python -m detox`
|
Python
|
mit
|
tox-dev/detox
|
python
|
## Code Before:
import sys
import py
import detox
from detox.proc import Detox
def parse(args):
from tox.session import prepare
return prepare(args)
def main(args=None):
if args is None:
args = sys.argv[1:]
config = parse(args)
#now = py.std.time.time()
detox = Detox(config)
detox.startloopreport()
retcode = detox.runtestsmulti(config.envlist)
#elapsed = py.std.time.time() - now
#cumulated = detox.toxsession.report.cumulated_time
#detox.toxsession.report.line(
# "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
# cumulated / elapsed, elapsed, cumulated), bold=True)
return retcode
## Instruction:
Raise system code on exit from `python -m detox`
## Code After:
import sys
import py
import detox
from detox.proc import Detox
def parse(args):
from tox.session import prepare
return prepare(args)
def main(args=None):
if args is None:
args = sys.argv[1:]
config = parse(args)
#now = py.std.time.time()
detox = Detox(config)
detox.startloopreport()
retcode = detox.runtestsmulti(config.envlist)
#elapsed = py.std.time.time() - now
#cumulated = detox.toxsession.report.cumulated_time
#detox.toxsession.report.line(
# "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
# cumulated / elapsed, elapsed, cumulated), bold=True)
raise SystemExit(retcode)
|
...
#detox.toxsession.report.line(
# "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
# cumulated / elapsed, elapsed, cumulated), bold=True)
raise SystemExit(retcode)
...
|
c98ab8807440e3cdbb98e11c53c7f246c35614fe
|
dedupe/convenience.py
|
dedupe/convenience.py
|
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
Change dataSample to generate indices of random pair using list of values
|
Change dataSample to generate indices of random pair using list of values
|
Python
|
mit
|
nmiranda/dedupe,01-/dedupe,neozhangthe1/dedupe,neozhangthe1/dedupe,nmiranda/dedupe,davidkunio/dedupe,dedupeio/dedupe,dedupeio/dedupe-examples,datamade/dedupe,tfmorris/dedupe,tfmorris/dedupe,davidkunio/dedupe,01-/dedupe,datamade/dedupe,pombredanne/dedupe,dedupeio/dedupe,pombredanne/dedupe
|
python
|
## Code Before:
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
random_pairs = dedupe.core.randomPairs(len(data), sample_size)
return tuple((data[k1], data[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
## Instruction:
Change dataSample to generate indices of random pair using list of values
## Code After:
import collections
import dedupe.core
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
blocks = dedupe.core.OrderedDict({})
record_blocks = dedupe.core.OrderedDict({})
key_blocks = dedupe.core.OrderedDict({})
blocker.tfIdfBlocks(data_d.iteritems())
for (record_id, record) in data_d.iteritems():
for key in blocker((record_id, record)):
blocks.setdefault(key, {}).update({record_id : record})
blocked_records = tuple(block for block in blocks.values())
return blocked_records
|
...
def dataSample(data, sample_size):
'''Randomly sample pairs of records from a data dictionary'''
data_list = data.values()
random_pairs = dedupe.core.randomPairs(len(data_list), sample_size)
return tuple((data_list[k1], data_list[k2]) for k1, k2 in random_pairs)
def blockData(data_d, blocker):
...
|
cacbc6825be010f6b839c8d21392a43b8b7b938d
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='pandocfilters',
version='1.0',
description='Utilities for writing pandoc filters in python',
author='John MacFarlane',
author_email='[email protected]',
url='http://github.com/jgm/pandocfilters',
py_modules=['pandocfilters'],
keywords=['pandoc'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Text Processing :: Filters'
],
)
|
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='pandocfilters',
version='1.0',
description='Utilities for writing pandoc filters in python',
long_description=read('README.rst'),
author='John MacFarlane',
author_email='[email protected]',
url='http://github.com/jgm/pandocfilters',
py_modules=['pandocfilters'],
keywords=['pandoc'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Text Processing :: Filters'
],
)
|
INclude README as long description.
|
INclude README as long description.
|
Python
|
bsd-3-clause
|
AugustH/pandocfilters,infotroph/pandocfilters,silvio/pandocfilters,alycosta/pandocfilters,timtylin/scholdoc-filters,jgm/pandocfilters
|
python
|
## Code Before:
from distutils.core import setup
setup(name='pandocfilters',
version='1.0',
description='Utilities for writing pandoc filters in python',
author='John MacFarlane',
author_email='[email protected]',
url='http://github.com/jgm/pandocfilters',
py_modules=['pandocfilters'],
keywords=['pandoc'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Text Processing :: Filters'
],
)
## Instruction:
INclude README as long description.
## Code After:
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='pandocfilters',
version='1.0',
description='Utilities for writing pandoc filters in python',
long_description=read('README.rst'),
author='John MacFarlane',
author_email='[email protected]',
url='http://github.com/jgm/pandocfilters',
py_modules=['pandocfilters'],
keywords=['pandoc'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Text Processing :: Filters'
],
)
|
...
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='pandocfilters',
version='1.0',
description='Utilities for writing pandoc filters in python',
long_description=read('README.rst'),
author='John MacFarlane',
author_email='[email protected]',
url='http://github.com/jgm/pandocfilters',
...
|
13fc59baa49d4f49d4e6ad9e0766ae2aede2fc25
|
proselint/command_line.py
|
proselint/command_line.py
|
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
Raise an error if no file is specified
|
Raise an error if no file is specified
|
Python
|
bsd-3-clause
|
jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
python
|
## Code Before:
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
## Instruction:
Raise an error if no file is specified
## Code After:
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
...
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
...
|
ea2247fe90836e92067ce27e5b22cf8e7dc7bc1b
|
saleor/app/tasks.py
|
saleor/app/tasks.py
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
app_installation.message = "Unknow error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
Add more context to install app msg
|
Add more context to install app msg
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
python
|
## Code Before:
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception:
app_installation.message = "Unknow error. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
## Instruction:
Add more context to install app msg
## Code After:
import logging
from django.core.exceptions import ValidationError
from requests import HTTPError, RequestException
from .. import celeryconf
from ..core import JobStatus
from .installation_utils import install_app
from .models import AppInstallation
logger = logging.getLogger(__name__)
@celeryconf.app.task
def install_app_task(job_id, activate=False):
app_installation = AppInstallation.objects.get(id=job_id)
try:
install_app(app_installation, activate=activate)
app_installation.delete()
return
except ValidationError as e:
msg = ", ".join([f"{name}: {err}" for name, err in e.message_dict.items()])
app_installation.message = msg
except (RequestException, HTTPError) as e:
logger.warning("Failed to install an app. error: %s", e)
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
|
// ... existing code ...
app_installation.message = (
"Failed to connect to app. Try later or contact with app support."
)
except Exception as e:
logger.warning("Failed to install app. error %s", e)
app_installation.message = f"Error {e}. Contact with app support."
app_installation.status = JobStatus.FAILED
app_installation.save()
// ... rest of the code ...
|
7cb4734a837ad9d43ef979085d0f6d474f45178c
|
test_project/select2_outside_admin/views.py
|
test_project/select2_outside_admin/views.py
|
try:
from django.urls import reverse_lazy
except ImportError:
from django.core.urlresolvers import reverse_lazy
from django.forms import inlineformset_factory
from django.views import generic
from select2_many_to_many.forms import TForm
from select2_many_to_many.models import TModel
class UpdateView(generic.UpdateView):
model = TModel
form_class = TForm
template_name = 'select2_outside_admin.html'
success_url = reverse_lazy('select2_outside_admin')
formset_class = inlineformset_factory(
TModel,
TModel,
form=TForm,
extra=1,
fk_name='for_inline',
fields=('name', 'test')
)
def get_object(self):
return TModel.objects.first()
def post(self, request, *args, **kwargs):
form = self.get_form()
if form.is_valid() and self.formset.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
def form_valid(self, form):
result = super().form_valid(form)
self.formset.save()
return result
@property
def formset(self):
if '_formset' not in self.__dict__:
setattr(self, '_formset', self.formset_class(
self.request.POST if self.request.method == 'POST' else None,
instance=getattr(self, 'object', self.get_object()),
))
return self._formset
|
try:
from django.urls import reverse_lazy
except ImportError:
from django.core.urlresolvers import reverse_lazy
from django.forms import inlineformset_factory
from django.views import generic
from select2_many_to_many.forms import TForm
from select2_many_to_many.models import TModel
class UpdateView(generic.UpdateView):
model = TModel
form_class = TForm
template_name = 'select2_outside_admin.html'
success_url = reverse_lazy('select2_outside_admin')
formset_class = inlineformset_factory(
TModel,
TModel,
form=TForm,
extra=1,
fk_name='for_inline',
fields=('name', 'test')
)
def get_object(self):
return TModel.objects.first()
def post(self, request, *args, **kwargs):
self.object = self.get_object()
form = self.get_form()
if form.is_valid() and self.formset.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
def form_valid(self, form):
result = super().form_valid(form)
self.formset.save()
return result
@property
def formset(self):
if '_formset' not in self.__dict__:
setattr(self, '_formset', self.formset_class(
self.request.POST if self.request.method == 'POST' else None,
instance=getattr(self, 'object', self.get_object()),
))
return self._formset
|
Fix example outside the admin
|
Fix example outside the admin
|
Python
|
mit
|
yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light
|
python
|
## Code Before:
try:
from django.urls import reverse_lazy
except ImportError:
from django.core.urlresolvers import reverse_lazy
from django.forms import inlineformset_factory
from django.views import generic
from select2_many_to_many.forms import TForm
from select2_many_to_many.models import TModel
class UpdateView(generic.UpdateView):
model = TModel
form_class = TForm
template_name = 'select2_outside_admin.html'
success_url = reverse_lazy('select2_outside_admin')
formset_class = inlineformset_factory(
TModel,
TModel,
form=TForm,
extra=1,
fk_name='for_inline',
fields=('name', 'test')
)
def get_object(self):
return TModel.objects.first()
def post(self, request, *args, **kwargs):
form = self.get_form()
if form.is_valid() and self.formset.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
def form_valid(self, form):
result = super().form_valid(form)
self.formset.save()
return result
@property
def formset(self):
if '_formset' not in self.__dict__:
setattr(self, '_formset', self.formset_class(
self.request.POST if self.request.method == 'POST' else None,
instance=getattr(self, 'object', self.get_object()),
))
return self._formset
## Instruction:
Fix example outside the admin
## Code After:
try:
from django.urls import reverse_lazy
except ImportError:
from django.core.urlresolvers import reverse_lazy
from django.forms import inlineformset_factory
from django.views import generic
from select2_many_to_many.forms import TForm
from select2_many_to_many.models import TModel
class UpdateView(generic.UpdateView):
model = TModel
form_class = TForm
template_name = 'select2_outside_admin.html'
success_url = reverse_lazy('select2_outside_admin')
formset_class = inlineformset_factory(
TModel,
TModel,
form=TForm,
extra=1,
fk_name='for_inline',
fields=('name', 'test')
)
def get_object(self):
return TModel.objects.first()
def post(self, request, *args, **kwargs):
self.object = self.get_object()
form = self.get_form()
if form.is_valid() and self.formset.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
def form_valid(self, form):
result = super().form_valid(form)
self.formset.save()
return result
@property
def formset(self):
if '_formset' not in self.__dict__:
setattr(self, '_formset', self.formset_class(
self.request.POST if self.request.method == 'POST' else None,
instance=getattr(self, 'object', self.get_object()),
))
return self._formset
|
# ... existing code ...
return TModel.objects.first()
def post(self, request, *args, **kwargs):
self.object = self.get_object()
form = self.get_form()
if form.is_valid() and self.formset.is_valid():
return self.form_valid(form)
# ... rest of the code ...
|
b0c8228755e6d86a77f3a74999216b31feb44a6b
|
webrtc/experiments.h
|
webrtc/experiments.h
|
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXPERIMENTS_H_
#define WEBRTC_EXPERIMENTS_H_
#include "webrtc/typedefs.h"
namespace webrtc {
struct RemoteBitrateEstimatorMinRate {
RemoteBitrateEstimatorMinRate() : min_rate(30000) {}
RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {}
uint32_t min_rate;
};
struct SkipEncodingUnusedStreams {
SkipEncodingUnusedStreams() : enabled(false) {}
explicit SkipEncodingUnusedStreams(bool set_enabled)
: enabled(set_enabled) {}
virtual ~SkipEncodingUnusedStreams() {}
const bool enabled;
};
struct AimdRemoteRateControl {
AimdRemoteRateControl() : enabled(false) {}
explicit AimdRemoteRateControl(bool set_enabled)
: enabled(set_enabled) {}
virtual ~AimdRemoteRateControl() {}
const bool enabled;
};
} // namespace webrtc
#endif // WEBRTC_EXPERIMENTS_H_
|
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXPERIMENTS_H_
#define WEBRTC_EXPERIMENTS_H_
#include "webrtc/typedefs.h"
namespace webrtc {
struct RemoteBitrateEstimatorMinRate {
RemoteBitrateEstimatorMinRate() : min_rate(30000) {}
RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {}
uint32_t min_rate;
};
struct AimdRemoteRateControl {
AimdRemoteRateControl() : enabled(false) {}
explicit AimdRemoteRateControl(bool set_enabled)
: enabled(set_enabled) {}
virtual ~AimdRemoteRateControl() {}
const bool enabled;
};
} // namespace webrtc
#endif // WEBRTC_EXPERIMENTS_H_
|
Remove no longer used SkipEncodingUnusedStreams.
|
Remove no longer used SkipEncodingUnusedStreams.
[email protected]
Review URL: https://webrtc-codereview.appspot.com/18829004
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@6753 4adac7df-926f-26a2-2b94-8c16560cd09d
|
C
|
bsd-3-clause
|
mwgoldsmith/libilbc,TimothyGu/libilbc,mwgoldsmith/ilbc,mwgoldsmith/ilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,mwgoldsmith/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc,ShiftMediaProject/libilbc,mwgoldsmith/ilbc,mwgoldsmith/libilbc,mwgoldsmith/ilbc,TimothyGu/libilbc,TimothyGu/libilbc,mwgoldsmith/libilbc
|
c
|
## Code Before:
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXPERIMENTS_H_
#define WEBRTC_EXPERIMENTS_H_
#include "webrtc/typedefs.h"
namespace webrtc {
struct RemoteBitrateEstimatorMinRate {
RemoteBitrateEstimatorMinRate() : min_rate(30000) {}
RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {}
uint32_t min_rate;
};
struct SkipEncodingUnusedStreams {
SkipEncodingUnusedStreams() : enabled(false) {}
explicit SkipEncodingUnusedStreams(bool set_enabled)
: enabled(set_enabled) {}
virtual ~SkipEncodingUnusedStreams() {}
const bool enabled;
};
struct AimdRemoteRateControl {
AimdRemoteRateControl() : enabled(false) {}
explicit AimdRemoteRateControl(bool set_enabled)
: enabled(set_enabled) {}
virtual ~AimdRemoteRateControl() {}
const bool enabled;
};
} // namespace webrtc
#endif // WEBRTC_EXPERIMENTS_H_
## Instruction:
Remove no longer used SkipEncodingUnusedStreams.
[email protected]
Review URL: https://webrtc-codereview.appspot.com/18829004
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@6753 4adac7df-926f-26a2-2b94-8c16560cd09d
## Code After:
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXPERIMENTS_H_
#define WEBRTC_EXPERIMENTS_H_
#include "webrtc/typedefs.h"
namespace webrtc {
struct RemoteBitrateEstimatorMinRate {
RemoteBitrateEstimatorMinRate() : min_rate(30000) {}
RemoteBitrateEstimatorMinRate(uint32_t min_rate) : min_rate(min_rate) {}
uint32_t min_rate;
};
struct AimdRemoteRateControl {
AimdRemoteRateControl() : enabled(false) {}
explicit AimdRemoteRateControl(bool set_enabled)
: enabled(set_enabled) {}
virtual ~AimdRemoteRateControl() {}
const bool enabled;
};
} // namespace webrtc
#endif // WEBRTC_EXPERIMENTS_H_
|
// ... existing code ...
uint32_t min_rate;
};
struct AimdRemoteRateControl {
AimdRemoteRateControl() : enabled(false) {}
explicit AimdRemoteRateControl(bool set_enabled)
// ... rest of the code ...
|
61768b0b8e04f3a2c8353db4c2e332b499a5c03a
|
Test/MathLib.h
|
Test/MathLib.h
|
//MathLib.h
#ifndef _MATHLIB_
#define _MATHLIB_
#endif
|
//MathLib.h
#ifndef _MATHLIB_
#define _MATHLIB_
//Make some change to check if new branch is created.
#endif
|
Make some change to check if new branch is created.
|
Make some change to check if new branch is created.
|
C
|
mit
|
mrlitong/fpsgame,mrlitong/fpsgame,mrlitong/Game-Engine-Development-Usage,mrlitong/fpsgame
|
c
|
## Code Before:
//MathLib.h
#ifndef _MATHLIB_
#define _MATHLIB_
#endif
## Instruction:
Make some change to check if new branch is created.
## Code After:
//MathLib.h
#ifndef _MATHLIB_
#define _MATHLIB_
//Make some change to check if new branch is created.
#endif
|
# ... existing code ...
#ifndef _MATHLIB_
#define _MATHLIB_
//Make some change to check if new branch is created.
#endif
# ... rest of the code ...
|
f1ab4741e908aef88a7ac7f0c375b75fe51c39b4
|
packages/Python/lldbsuite/test/lang/c/inlines/main.c
|
packages/Python/lldbsuite/test/lang/c/inlines/main.c
|
void test1(int) __attribute__ ((always_inline));
void test2(int) __attribute__ ((always_inline));
void test2(int b) {
printf("test2(%d)\n", b); //% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["42"])
}
void test1(int a) {
printf("test1(%d)\n", a);
test2(a+1);//% self.dbg.HandleCommand("step")
//% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["24"])
}
int main() {
test2(42);
test1(23);
}
|
inline void test1(int) __attribute__ ((always_inline));
inline void test2(int) __attribute__ ((always_inline));
void test2(int b) {
printf("test2(%d)\n", b); //% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["42"])
}
void test1(int a) {
printf("test1(%d)\n", a);
test2(a+1);//% self.dbg.HandleCommand("step")
//% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["24"])
}
int main() {
test2(42);
test1(23);
}
|
Make sure TestRedefinitionsInInlines.py actually inlines.
|
Make sure TestRedefinitionsInInlines.py actually inlines.
Reviewers: spyffe
Subscribers: lldb-commits
Differential Revision: http://reviews.llvm.org/D20540
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@270493 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb
|
c
|
## Code Before:
void test1(int) __attribute__ ((always_inline));
void test2(int) __attribute__ ((always_inline));
void test2(int b) {
printf("test2(%d)\n", b); //% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["42"])
}
void test1(int a) {
printf("test1(%d)\n", a);
test2(a+1);//% self.dbg.HandleCommand("step")
//% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["24"])
}
int main() {
test2(42);
test1(23);
}
## Instruction:
Make sure TestRedefinitionsInInlines.py actually inlines.
Reviewers: spyffe
Subscribers: lldb-commits
Differential Revision: http://reviews.llvm.org/D20540
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@270493 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
inline void test1(int) __attribute__ ((always_inline));
inline void test2(int) __attribute__ ((always_inline));
void test2(int b) {
printf("test2(%d)\n", b); //% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["42"])
}
void test1(int a) {
printf("test1(%d)\n", a);
test2(a+1);//% self.dbg.HandleCommand("step")
//% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["24"])
}
int main() {
test2(42);
test1(23);
}
|
# ... existing code ...
inline void test1(int) __attribute__ ((always_inline));
inline void test2(int) __attribute__ ((always_inline));
void test2(int b) {
printf("test2(%d)\n", b); //% self.expect("expression b", DATA_TYPES_DISPLAYED_CORRECTLY, substrs = ["42"])
# ... rest of the code ...
|
47b31c05b71bedd540810d95c10189c4d4580534
|
src/tag.h
|
src/tag.h
|
class Tag {
public:
Tag(tagEntry entry) {
name = entry.name;
file = entry.file;
kind = entry.kind != NULL ? entry.kind : "";
if (entry.address.pattern != NULL)
pattern = entry.address.pattern;
else
pattern = "";
}
std::string name;
std::string file;
std::string kind;
std::string pattern;
};
#endif // SRC_TAG_H_
|
class Tag {
public:
Tag(tagEntry entry) {
name = entry.name;
file = entry.file;
kind = entry.kind != NULL ? entry.kind : "";
pattern = entry.address.pattern != NULL ? entry.address.pattern : "";
}
std::string name;
std::string file;
std::string kind;
std::string pattern;
};
#endif // SRC_TAG_H_
|
Switch pattern's check for NULL to ternary operator
|
Switch pattern's check for NULL to ternary operator
(To be consistent with “kind”’s check)
|
C
|
mit
|
zertosh/ctags-prebuilt,zertosh/ctags-prebuilt,atom/node-ctags,zertosh/ctags-prebuilt,atom/node-ctags,yongkangchen/node-ctags,yongkangchen/node-ctags,yongkangchen/node-ctags,zertosh/ctags-prebuilt,atom/node-ctags
|
c
|
## Code Before:
class Tag {
public:
Tag(tagEntry entry) {
name = entry.name;
file = entry.file;
kind = entry.kind != NULL ? entry.kind : "";
if (entry.address.pattern != NULL)
pattern = entry.address.pattern;
else
pattern = "";
}
std::string name;
std::string file;
std::string kind;
std::string pattern;
};
#endif // SRC_TAG_H_
## Instruction:
Switch pattern's check for NULL to ternary operator
(To be consistent with “kind”’s check)
## Code After:
class Tag {
public:
Tag(tagEntry entry) {
name = entry.name;
file = entry.file;
kind = entry.kind != NULL ? entry.kind : "";
pattern = entry.address.pattern != NULL ? entry.address.pattern : "";
}
std::string name;
std::string file;
std::string kind;
std::string pattern;
};
#endif // SRC_TAG_H_
|
# ... existing code ...
name = entry.name;
file = entry.file;
kind = entry.kind != NULL ? entry.kind : "";
pattern = entry.address.pattern != NULL ? entry.address.pattern : "";
}
std::string name;
# ... rest of the code ...
|
f3b877cba4ab0ffdfc553f65547305698ef16391
|
src/BLEDeviceLimits.h
|
src/BLEDeviceLimits.h
|
// Copyright (c) Sandeep Mistry. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#ifndef _BLE_LIMITS_H_
#define _BLE_LIMITS_H_
#include <stdlib.h>
#ifndef __AVR__
#ifndef max
#define max(a,b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef min
#define min(a,b) (((a) < (b)) ? (a) : (b))
#endif
#endif
#if defined(NRF51) || defined(__RFduino__)
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 26
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 29
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 29
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#else
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 20
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 20
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 20
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#endif
#endif
|
// Copyright (c) Sandeep Mistry. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#ifndef _BLE_LIMITS_H_
#define _BLE_LIMITS_H_
#include <stdlib.h>
#ifndef __AVR__
#ifndef max
#define max(a,b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef min
#define min(a,b) (((a) < (b)) ? (a) : (b))
#endif
#endif
#if defined(NRF51) || defined(NRF52) || defined(__RFduino__)
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 26
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 29
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 29
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#else
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 20
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 20
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 20
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#endif
#endif
|
Fix length issues with nRF52
|
Fix length issues with nRF52
|
C
|
mit
|
sandeepmistry/arduino-BLEPeripheral,sandeepmistry/arduino-BLEPeripheral,sandeepmistry/arduino-BLEPeripheral
|
c
|
## Code Before:
// Copyright (c) Sandeep Mistry. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#ifndef _BLE_LIMITS_H_
#define _BLE_LIMITS_H_
#include <stdlib.h>
#ifndef __AVR__
#ifndef max
#define max(a,b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef min
#define min(a,b) (((a) < (b)) ? (a) : (b))
#endif
#endif
#if defined(NRF51) || defined(__RFduino__)
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 26
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 29
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 29
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#else
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 20
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 20
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 20
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#endif
#endif
## Instruction:
Fix length issues with nRF52
## Code After:
// Copyright (c) Sandeep Mistry. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#ifndef _BLE_LIMITS_H_
#define _BLE_LIMITS_H_
#include <stdlib.h>
#ifndef __AVR__
#ifndef max
#define max(a,b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef min
#define min(a,b) (((a) < (b)) ? (a) : (b))
#endif
#endif
#if defined(NRF51) || defined(NRF52) || defined(__RFduino__)
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 26
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 29
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 29
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#else
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 20
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 20
#define BLE_EIR_DATA_MAX_VALUE_LENGTH 20
#define BLE_ATTRIBUTE_MAX_VALUE_LENGTH 20
#define BLE_REMOTE_ATTRIBUTE_MAX_VALUE_LENGTH 22
#endif
#endif
|
...
#endif
#if defined(NRF51) || defined(NRF52) || defined(__RFduino__)
#define BLE_ADVERTISEMENT_DATA_MAX_VALUE_LENGTH 26
#define BLE_SCAN_DATA_MAX_VALUE_LENGTH 29
...
|
bcec4724dc434218f7b2bce0aaabf391f86847b6
|
ocradmin/core/decorators.py
|
ocradmin/core/decorators.py
|
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
Add plugins to the domains which handle temp files
|
Add plugins to the domains which handle temp files
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
python
|
## Code Before:
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith("/ocr/")
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
## Instruction:
Add plugins to the domains which handle temp files
## Code After:
import os
from datetime import datetime
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.conf import settings
def project_required(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
path = urlquote(request.get_full_path())
if not request.session.get("project"):
return HttpResponseRedirect("/projects/list/?next=%s" % path)
return func(request, *args, **kwargs)
return wrapper
def saves_files(func):
"""
Decorator function for other actions that
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
temp = True
if temp:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.TEMP_PATH,
request.user.username,
datetime.now().strftime("%Y%m%d%H%M%S")
)
else:
output_path = os.path.join(
settings.MEDIA_ROOT,
settings.USER_FILES_PATH,
project.slug
)
request.__class__.output_path = output_path
return func(request, *args, **kwargs)
return wrapper
|
// ... existing code ...
require a project to be open in the session.
"""
def wrapper(request, *args, **kwargs):
temp = request.path.startswith(("/ocr/", "/plugins/"))
project = request.session.get("project")
output_path = None
if project is None:
// ... rest of the code ...
|
fc2fb305b4f814a4881fdd5c0f3fc755d589748f
|
sheath-example/src/main/java/coffee/CoffeeApp.java
|
sheath-example/src/main/java/coffee/CoffeeApp.java
|
package coffee;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.dom.client.Document;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Button;
import javax.inject.Inject;
import sheath.Modules;
import sheath.SheathEntryPoint;
class CoffeeApp implements EntryPoint {
@Modules(DripCoffeeModule.class)
interface CoffeeGagger extends SheathEntryPoint<CoffeeApp> { }
@Inject CoffeeMaker coffeeMaker;
@Override
public void onModuleLoad() {
CoffeeGagger gagger = GWT.create(CoffeeGagger.class);
gagger.inject(this);
Button btn = Button.wrap(Document.get().getElementById("coffeeMaker"));
btn.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
coffeeMaker.brew();
}
});
btn.setEnabled(true);
}
}
|
package coffee;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.dom.client.Document;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Button;
import javax.inject.Inject;
import sheath.Modules;
import sheath.SheathEntryPoint;
class CoffeeApp implements EntryPoint {
@Modules(DripCoffeeModule.class)
interface CoffeeGraph extends SheathEntryPoint<CoffeeApp> { }
@Inject CoffeeMaker coffeeMaker;
@Override
public void onModuleLoad() {
CoffeeGraph graph = GWT.create(CoffeeGraph.class);
graph.inject(this);
Button btn = Button.wrap(Document.get().getElementById("coffeeMaker"));
btn.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
coffeeMaker.brew();
}
});
btn.setEnabled(true);
}
}
|
Remove references to old-name _Gagger_ in the sample.
|
Remove references to old-name _Gagger_ in the sample.
|
Java
|
apache-2.0
|
tbroyer/sheath
|
java
|
## Code Before:
package coffee;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.dom.client.Document;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Button;
import javax.inject.Inject;
import sheath.Modules;
import sheath.SheathEntryPoint;
class CoffeeApp implements EntryPoint {
@Modules(DripCoffeeModule.class)
interface CoffeeGagger extends SheathEntryPoint<CoffeeApp> { }
@Inject CoffeeMaker coffeeMaker;
@Override
public void onModuleLoad() {
CoffeeGagger gagger = GWT.create(CoffeeGagger.class);
gagger.inject(this);
Button btn = Button.wrap(Document.get().getElementById("coffeeMaker"));
btn.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
coffeeMaker.brew();
}
});
btn.setEnabled(true);
}
}
## Instruction:
Remove references to old-name _Gagger_ in the sample.
## Code After:
package coffee;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.dom.client.Document;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Button;
import javax.inject.Inject;
import sheath.Modules;
import sheath.SheathEntryPoint;
class CoffeeApp implements EntryPoint {
@Modules(DripCoffeeModule.class)
interface CoffeeGraph extends SheathEntryPoint<CoffeeApp> { }
@Inject CoffeeMaker coffeeMaker;
@Override
public void onModuleLoad() {
CoffeeGraph graph = GWT.create(CoffeeGraph.class);
graph.inject(this);
Button btn = Button.wrap(Document.get().getElementById("coffeeMaker"));
btn.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
coffeeMaker.brew();
}
});
btn.setEnabled(true);
}
}
|
...
class CoffeeApp implements EntryPoint {
@Modules(DripCoffeeModule.class)
interface CoffeeGraph extends SheathEntryPoint<CoffeeApp> { }
@Inject CoffeeMaker coffeeMaker;
@Override
public void onModuleLoad() {
CoffeeGraph graph = GWT.create(CoffeeGraph.class);
graph.inject(this);
Button btn = Button.wrap(Document.get().getElementById("coffeeMaker"));
btn.addClickHandler(new ClickHandler() {
...
|
26a78ad13bf4f87785501266fdde09f0053a97da
|
modules/qi-core/src/main/java/org/jpos/util/AmountConverter.java
|
modules/qi-core/src/main/java/org/jpos/util/AmountConverter.java
|
package org.jpos.util;
import com.vaadin.data.converter.StringToBigDecimalConverter;
import java.text.NumberFormat;
import java.util.Locale;
public class AmountConverter extends StringToBigDecimalConverter {
public AmountConverter(String errorMessage) {
super(errorMessage);
}
@Override
protected NumberFormat getFormat(Locale locale) {
NumberFormat amountFormat = NumberFormat.getInstance();
amountFormat.setGroupingUsed(true);
amountFormat.setMinimumFractionDigits(2);
return amountFormat;
}
}
|
package org.jpos.util;
import com.vaadin.data.converter.StringToBigDecimalConverter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Locale;
public class AmountConverter extends StringToBigDecimalConverter {
public AmountConverter(String errorMessage) {
super(errorMessage);
}
@Override
protected NumberFormat getFormat(Locale locale) {
NumberFormat amountFormat = NumberFormat.getInstance();
amountFormat.setGroupingUsed(true);
amountFormat.setMinimumFractionDigits(2);
if (amountFormat instanceof DecimalFormat) {
((DecimalFormat) amountFormat).setParseBigDecimal(true);
}
return amountFormat;
}
}
|
Make parse method of NumberFormat always return a BigDecimal
|
Make parse method of NumberFormat always return a BigDecimal
|
Java
|
agpl-3.0
|
barspi/jPOS-EE,jpos/jPOS-EE,barspi/jPOS-EE,jpos/jPOS-EE,barspi/jPOS-EE,jrfinc/jPOS-EE,jrfinc/jPOS-EE,jpos/jPOS-EE,jrfinc/jPOS-EE
|
java
|
## Code Before:
package org.jpos.util;
import com.vaadin.data.converter.StringToBigDecimalConverter;
import java.text.NumberFormat;
import java.util.Locale;
public class AmountConverter extends StringToBigDecimalConverter {
public AmountConverter(String errorMessage) {
super(errorMessage);
}
@Override
protected NumberFormat getFormat(Locale locale) {
NumberFormat amountFormat = NumberFormat.getInstance();
amountFormat.setGroupingUsed(true);
amountFormat.setMinimumFractionDigits(2);
return amountFormat;
}
}
## Instruction:
Make parse method of NumberFormat always return a BigDecimal
## Code After:
package org.jpos.util;
import com.vaadin.data.converter.StringToBigDecimalConverter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Locale;
public class AmountConverter extends StringToBigDecimalConverter {
public AmountConverter(String errorMessage) {
super(errorMessage);
}
@Override
protected NumberFormat getFormat(Locale locale) {
NumberFormat amountFormat = NumberFormat.getInstance();
amountFormat.setGroupingUsed(true);
amountFormat.setMinimumFractionDigits(2);
if (amountFormat instanceof DecimalFormat) {
((DecimalFormat) amountFormat).setParseBigDecimal(true);
}
return amountFormat;
}
}
|
// ... existing code ...
import com.vaadin.data.converter.StringToBigDecimalConverter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Locale;
// ... modified code ...
NumberFormat amountFormat = NumberFormat.getInstance();
amountFormat.setGroupingUsed(true);
amountFormat.setMinimumFractionDigits(2);
if (amountFormat instanceof DecimalFormat) {
((DecimalFormat) amountFormat).setParseBigDecimal(true);
}
return amountFormat;
}
// ... rest of the code ...
|
a4574def7875f8f0eeffddd1e6fc97b79a89caaf
|
apptentive/src/com/apptentive/android/sdk/module/rating/view/ApptentiveBaseDialog.java
|
apptentive/src/com/apptentive/android/sdk/module/rating/view/ApptentiveBaseDialog.java
|
/*
* Copyright (c) 2014, Apptentive, Inc. All Rights Reserved.
* Please refer to the LICENSE file for the terms and conditions
* under which redistribution and use of this file is permitted.
*/
package com.apptentive.android.sdk.module.rating.view;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.view.Gravity;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
/**
* @author Sky Kelsey
*/
public abstract class ApptentiveBaseDialog extends Dialog {
public ApptentiveBaseDialog(final Context context, int layout) {
super(context);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(layout); // This needs to be set before the window layout is messed with below.
// Let the dialog take up the whole device width.
WindowManager.LayoutParams params = getWindow().getAttributes();
params.width = ViewGroup.LayoutParams.FILL_PARENT;
params.height = ViewGroup.LayoutParams.FILL_PARENT;
params.gravity = Gravity.CENTER;
params.dimAmount = 0.5f;
getWindow().setAttributes(params);
getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
getWindow().addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
}
}
|
/*
* Copyright (c) 2014, Apptentive, Inc. All Rights Reserved.
* Please refer to the LICENSE file for the terms and conditions
* under which redistribution and use of this file is permitted.
*/
package com.apptentive.android.sdk.module.rating.view;
import android.app.Dialog;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import android.view.Gravity;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
/**
* @author Sky Kelsey
*/
public abstract class ApptentiveBaseDialog extends Dialog {
public ApptentiveBaseDialog(final Context context, int layout) {
super(context);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(layout); // This needs to be set before the window layout is messed with below.
// Let the dialog take up the whole device width.
WindowManager.LayoutParams params = getWindow().getAttributes();
params.width = ViewGroup.LayoutParams.FILL_PARENT;
params.height = ViewGroup.LayoutParams.FILL_PARENT;
params.gravity = Gravity.CENTER;
getWindow().setAttributes(params);
getWindow().setBackgroundDrawable(new ColorDrawable(0x7F000000));
}
}
|
Use alpha layer instead of window flag for dimming.
|
Use alpha layer instead of window flag for dimming.
|
Java
|
bsd-3-clause
|
apptentive/apptentive-android,mikandi/apptentive-android
|
java
|
## Code Before:
/*
* Copyright (c) 2014, Apptentive, Inc. All Rights Reserved.
* Please refer to the LICENSE file for the terms and conditions
* under which redistribution and use of this file is permitted.
*/
package com.apptentive.android.sdk.module.rating.view;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.view.Gravity;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
/**
* @author Sky Kelsey
*/
public abstract class ApptentiveBaseDialog extends Dialog {
public ApptentiveBaseDialog(final Context context, int layout) {
super(context);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(layout); // This needs to be set before the window layout is messed with below.
// Let the dialog take up the whole device width.
WindowManager.LayoutParams params = getWindow().getAttributes();
params.width = ViewGroup.LayoutParams.FILL_PARENT;
params.height = ViewGroup.LayoutParams.FILL_PARENT;
params.gravity = Gravity.CENTER;
params.dimAmount = 0.5f;
getWindow().setAttributes(params);
getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
getWindow().addFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
}
}
## Instruction:
Use alpha layer instead of window flag for dimming.
## Code After:
/*
* Copyright (c) 2014, Apptentive, Inc. All Rights Reserved.
* Please refer to the LICENSE file for the terms and conditions
* under which redistribution and use of this file is permitted.
*/
package com.apptentive.android.sdk.module.rating.view;
import android.app.Dialog;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import android.view.Gravity;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
/**
* @author Sky Kelsey
*/
public abstract class ApptentiveBaseDialog extends Dialog {
public ApptentiveBaseDialog(final Context context, int layout) {
super(context);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(layout); // This needs to be set before the window layout is messed with below.
// Let the dialog take up the whole device width.
WindowManager.LayoutParams params = getWindow().getAttributes();
params.width = ViewGroup.LayoutParams.FILL_PARENT;
params.height = ViewGroup.LayoutParams.FILL_PARENT;
params.gravity = Gravity.CENTER;
getWindow().setAttributes(params);
getWindow().setBackgroundDrawable(new ColorDrawable(0x7F000000));
}
}
|
...
import android.app.Dialog;
import android.content.Context;
import android.graphics.drawable.ColorDrawable;
import android.view.Gravity;
import android.view.ViewGroup;
...
params.width = ViewGroup.LayoutParams.FILL_PARENT;
params.height = ViewGroup.LayoutParams.FILL_PARENT;
params.gravity = Gravity.CENTER;
getWindow().setAttributes(params);
getWindow().setBackgroundDrawable(new ColorDrawable(0x7F000000));
}
}
...
|
0f2284c7545819484849b2afb39fa34dd2730545
|
tests/integration_tests.c
|
tests/integration_tests.c
|
void test_timedep(void)
{
double *populations;
int num_pop;
/* Initialize QuaC */
timedep_test(&populations,&num_pop);
/* These values assume TSRK3BS */
TEST_ASSERT_EQUAL_FLOAT(populations[0],-1.487990e-04);
TEST_ASSERT_EQUAL_FLOAT(populations[1],1.799424e-04);
}
int main(int argc, char** argv)
{
UNITY_BEGIN();
QuaC_initialize(argc,argv);
RUN_TEST(test_timedep);
QuaC_finalize();
return UNITY_END();
}
|
void test_timedep(void)
{
double *populations;
int num_pop;
/* Initialize QuaC */
timedep_test(&populations,&num_pop);
/* These values assume TSRK3BS */
/* TEST_ASSERT_EQUAL_FLOAT(0.0,populations[0]); */
/* TEST_ASSERT_EQUAL_FLOAT(0.0,populations[1]); */
}
int main(int argc, char** argv)
{
UNITY_BEGIN();
QuaC_initialize(argc,argv);
RUN_TEST(test_timedep);
QuaC_finalize();
return UNITY_END();
}
|
Remove time_dep test for now - it is too dependent on exact conditions.
|
Remove time_dep test for now - it is too dependent on exact conditions.
|
C
|
mit
|
0tt3r/QuaC,0tt3r/QuaC,0tt3r/QuaC
|
c
|
## Code Before:
void test_timedep(void)
{
double *populations;
int num_pop;
/* Initialize QuaC */
timedep_test(&populations,&num_pop);
/* These values assume TSRK3BS */
TEST_ASSERT_EQUAL_FLOAT(populations[0],-1.487990e-04);
TEST_ASSERT_EQUAL_FLOAT(populations[1],1.799424e-04);
}
int main(int argc, char** argv)
{
UNITY_BEGIN();
QuaC_initialize(argc,argv);
RUN_TEST(test_timedep);
QuaC_finalize();
return UNITY_END();
}
## Instruction:
Remove time_dep test for now - it is too dependent on exact conditions.
## Code After:
void test_timedep(void)
{
double *populations;
int num_pop;
/* Initialize QuaC */
timedep_test(&populations,&num_pop);
/* These values assume TSRK3BS */
/* TEST_ASSERT_EQUAL_FLOAT(0.0,populations[0]); */
/* TEST_ASSERT_EQUAL_FLOAT(0.0,populations[1]); */
}
int main(int argc, char** argv)
{
UNITY_BEGIN();
QuaC_initialize(argc,argv);
RUN_TEST(test_timedep);
QuaC_finalize();
return UNITY_END();
}
|
...
timedep_test(&populations,&num_pop);
/* These values assume TSRK3BS */
/* TEST_ASSERT_EQUAL_FLOAT(0.0,populations[0]); */
/* TEST_ASSERT_EQUAL_FLOAT(0.0,populations[1]); */
}
...
|
b14e605c83f95e6e1a3c70f148c32bbdc0ca12b1
|
zeus/api/resources/build_index.py
|
zeus/api/resources/build_index.py
|
from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc()).limit(100)
return self.respond_with_schema(builds_schema, query)
|
from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
|
Add pagination to build index
|
feat: Add pagination to build index
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
python
|
## Code Before:
from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc()).limit(100)
return self.respond_with_schema(builds_schema, query)
## Instruction:
feat: Add pagination to build index
## Code After:
from sqlalchemy.orm import joinedload, subqueryload_all
from zeus import auth
from zeus.models import Build
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class BuildIndexResource(Resource):
def get(self):
"""
Return a list of builds.
"""
# tenants automatically restrict this query but we dont want
# to include public repos
tenant = auth.get_current_tenant()
if not tenant.repository_ids:
return self.respond([])
query = Build.query.options(
joinedload('repository'),
joinedload('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
|
# ... existing code ...
subqueryload_all('stats'),
).filter(
Build.repository_id.in_(tenant.repository_ids),
).order_by(Build.date_created.desc())
return self.paginate_with_schema(builds_schema, query)
# ... rest of the code ...
|
401e60837c13af5a350b1487225a296c2e803069
|
Lib/test/test_dumbdbm.py
|
Lib/test/test_dumbdbm.py
|
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
Use a saner test filename, to work on Windows.
|
Use a saner test filename, to work on Windows.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
python
|
## Code Before:
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
## Instruction:
Use a saner test filename, to work on Windows.
## Code After:
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
// ... existing code ...
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
// ... rest of the code ...
|
711c992a89f9a6118d2b274e2a526be62e670a92
|
examples/flask_server.py
|
examples/flask_server.py
|
from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
|
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
Set content-type in flask example
|
Set content-type in flask example
|
Python
|
mit
|
bcb/jsonrpcserver
|
python
|
## Code Before:
from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
## Instruction:
Set content-type in flask example
## Code After:
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
# ... existing code ...
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
# ... modified code ...
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
# ... rest of the code ...
|
038af07eb385a9b3ed6d008c3db646c1fd885f07
|
app/src/org/commcare/android/tasks/ResourceEngineListener.java
|
app/src/org/commcare/android/tasks/ResourceEngineListener.java
|
package org.commcare.android.tasks;
import org.commcare.android.tasks.ResourceEngineTask.ResourceEngineOutcomes;
import org.commcare.resources.model.UnresolvedResourceException;
public interface ResourceEngineListener {
public void reportSuccess(boolean b);
public void failMissingResource(UnresolvedResourceException ure, ResourceEngineOutcomes statusmissing);
public void failBadReqs(int code, String vReq, String vAvail, boolean majorIsProblem);
public void failUnknown(ResourceEngineOutcomes statusfailunknown);
public void updateResourceProgress(int done, int pending, int phase);
public void failWithNotification(ResourceEngineOutcomes statusfailstate);
}
|
package org.commcare.android.tasks;
import org.commcare.android.tasks.ResourceEngineTask.ResourceEngineOutcomes;
import org.commcare.resources.model.UnresolvedResourceException;
public interface ResourceEngineListener {
void reportSuccess(boolean b);
void failMissingResource(UnresolvedResourceException ure, ResourceEngineOutcomes statusmissing);
void failBadReqs(int code, String vReq, String vAvail, boolean majorIsProblem);
void failUnknown(ResourceEngineOutcomes statusfailunknown);
void updateResourceProgress(int done, int pending, int phase);
void failWithNotification(ResourceEngineOutcomes statusfailstate);
}
|
Remove redundant 'public' keyword from interface methods
|
Remove redundant 'public' keyword from interface methods
|
Java
|
apache-2.0
|
dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android
|
java
|
## Code Before:
package org.commcare.android.tasks;
import org.commcare.android.tasks.ResourceEngineTask.ResourceEngineOutcomes;
import org.commcare.resources.model.UnresolvedResourceException;
public interface ResourceEngineListener {
public void reportSuccess(boolean b);
public void failMissingResource(UnresolvedResourceException ure, ResourceEngineOutcomes statusmissing);
public void failBadReqs(int code, String vReq, String vAvail, boolean majorIsProblem);
public void failUnknown(ResourceEngineOutcomes statusfailunknown);
public void updateResourceProgress(int done, int pending, int phase);
public void failWithNotification(ResourceEngineOutcomes statusfailstate);
}
## Instruction:
Remove redundant 'public' keyword from interface methods
## Code After:
package org.commcare.android.tasks;
import org.commcare.android.tasks.ResourceEngineTask.ResourceEngineOutcomes;
import org.commcare.resources.model.UnresolvedResourceException;
public interface ResourceEngineListener {
void reportSuccess(boolean b);
void failMissingResource(UnresolvedResourceException ure, ResourceEngineOutcomes statusmissing);
void failBadReqs(int code, String vReq, String vAvail, boolean majorIsProblem);
void failUnknown(ResourceEngineOutcomes statusfailunknown);
void updateResourceProgress(int done, int pending, int phase);
void failWithNotification(ResourceEngineOutcomes statusfailstate);
}
|
...
import org.commcare.resources.model.UnresolvedResourceException;
public interface ResourceEngineListener {
void reportSuccess(boolean b);
void failMissingResource(UnresolvedResourceException ure, ResourceEngineOutcomes statusmissing);
void failBadReqs(int code, String vReq, String vAvail, boolean majorIsProblem);
void failUnknown(ResourceEngineOutcomes statusfailunknown);
void updateResourceProgress(int done, int pending, int phase);
void failWithNotification(ResourceEngineOutcomes statusfailstate);
}
...
|
6fdc8bc13b4b71d9285009f996863aad8af046b2
|
io/block_channel.h
|
io/block_channel.h
|
class Action;
class Buffer;
class BlockChannel {
protected:
BlockChannel(void)
{ }
public:
virtual ~BlockChannel()
{ }
virtual Action *close(EventCallback *) = 0;
virtual Action *read(off_t, EventCallback *) = 0;
virtual Action *write(off_t, Buffer *, EventCallback *) = 0;
};
#endif /* !BLOCK_CHANNEL_H */
|
class Action;
class Buffer;
class BlockChannel {
protected:
size_t bsize_;
BlockChannel(size_t bsize)
: bsize_(bsize)
{ }
public:
virtual ~BlockChannel()
{ }
virtual Action *close(EventCallback *) = 0;
virtual Action *read(off_t, EventCallback *) = 0;
virtual Action *write(off_t, Buffer *, EventCallback *) = 0;
};
#endif /* !BLOCK_CHANNEL_H */
|
Make block size a protected member of a block channel.
|
Make block size a protected member of a block channel.
|
C
|
bsd-2-clause
|
wanproxy/wanproxy,wanproxy/wanproxy,wanproxy/wanproxy
|
c
|
## Code Before:
class Action;
class Buffer;
class BlockChannel {
protected:
BlockChannel(void)
{ }
public:
virtual ~BlockChannel()
{ }
virtual Action *close(EventCallback *) = 0;
virtual Action *read(off_t, EventCallback *) = 0;
virtual Action *write(off_t, Buffer *, EventCallback *) = 0;
};
#endif /* !BLOCK_CHANNEL_H */
## Instruction:
Make block size a protected member of a block channel.
## Code After:
class Action;
class Buffer;
class BlockChannel {
protected:
size_t bsize_;
BlockChannel(size_t bsize)
: bsize_(bsize)
{ }
public:
virtual ~BlockChannel()
{ }
virtual Action *close(EventCallback *) = 0;
virtual Action *read(off_t, EventCallback *) = 0;
virtual Action *write(off_t, Buffer *, EventCallback *) = 0;
};
#endif /* !BLOCK_CHANNEL_H */
|
// ... existing code ...
class BlockChannel {
protected:
size_t bsize_;
BlockChannel(size_t bsize)
: bsize_(bsize)
{ }
public:
// ... rest of the code ...
|
bdcfb1ff4c076485a5fc3b00beaf81becec0717b
|
tests/utils/DependencyChecker.py
|
tests/utils/DependencyChecker.py
|
import subprocess as subp
class DependencyChecker(object):
def _check_test_dependencies(self):
for dep in self.DEPENDENCIES:
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
" is not available.".format(dep)
self.fail(error)
|
import sys
import subprocess as subp
class DependencyChecker(object):
def _check_test_dependencies(self):
for dep in self.DEPENDENCIES:
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
if sys.version_info >= (3, 3) and isinstance(available, bytes):
available = available.decode('utf-8')
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
" is not available.".format(dep)
self.fail(error)
|
Fix binary to str conversion
|
release/0.6.2: Fix binary to str conversion
|
Python
|
bsd-3-clause
|
nok/sklearn-porter
|
python
|
## Code Before:
import subprocess as subp
class DependencyChecker(object):
def _check_test_dependencies(self):
for dep in self.DEPENDENCIES:
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
" is not available.".format(dep)
self.fail(error)
## Instruction:
release/0.6.2: Fix binary to str conversion
## Code After:
import sys
import subprocess as subp
class DependencyChecker(object):
def _check_test_dependencies(self):
for dep in self.DEPENDENCIES:
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
if sys.version_info >= (3, 3) and isinstance(available, bytes):
available = available.decode('utf-8')
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
" is not available.".format(dep)
self.fail(error)
|
...
import sys
import subprocess as subp
...
cmd = 'if hash {} 2/dev/null; then ' \
'echo 1; else echo 0; fi'.format(dep)
available = subp.check_output(cmd, shell=True, stderr=subp.STDOUT)
if sys.version_info >= (3, 3) and isinstance(available, bytes):
available = available.decode('utf-8')
available = available.strip() is '1'
if not available:
error = "The required test dependency '{0}'" \
...
|
7c0c349656e6f02be0f3f0044f5d225f3688be08
|
bong/parse_args.py
|
bong/parse_args.py
|
from .settings import BongSettings, DEFAULT_MESSAGE
from .metadata import VERSION, SUMMARY
import argparse
PARSER = argparse.ArgumentParser(description=SUMMARY)
PARSER.add_argument('-V', '--version', action='version', version=VERSION,
help='Show version')
PARSER.add_argument('-s', '--short-break', action='store_const', const=5,
dest='minutes', default=25,
help='Time for a Pomodoro system short break')
PARSER.add_argument('-l', '--long-break', action='store_const',
const=15, dest='minutes',
help='Time for a Pomodoro system long break')
PARSER.add_argument('-p', '--pomodoro', action='store_const',
const=25, dest='minutes',
help='Time for a Pomodoro system single Pomodoro')
PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes',
help='Timer length, in minutes')
PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE,
help='Message to display in the notifier')
def parse_args(args):
settings = PARSER.parse_args(args)
return BongSettings(time=60*settings.minutes, message=settings.message)
|
from .settings import BongSettings, DEFAULT_MESSAGE
from .metadata import VERSION, SUMMARY
import argparse
PARSER = argparse.ArgumentParser(description=SUMMARY)
PARSER.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(VERSION),
help='show version')
PARSER.add_argument('-s', '--short-break', action='store_const', const=5,
dest='minutes', default=25,
help='time for a Pomodoro system short break')
PARSER.add_argument('-l', '--long-break', action='store_const',
const=15, dest='minutes',
help='time for a Pomodoro system long break')
PARSER.add_argument('-p', '--pomodoro', action='store_const',
const=25, dest='minutes',
help='time for a Pomodoro system single Pomodoro')
PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes',
help='timer length, in minutes')
PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE,
help='message to display in the notifier')
def parse_args(args):
settings = PARSER.parse_args(args)
return BongSettings(time=60*settings.minutes, message=settings.message)
|
Clean up the argument parsing
|
Clean up the argument parsing
|
Python
|
mit
|
prophile/bong
|
python
|
## Code Before:
from .settings import BongSettings, DEFAULT_MESSAGE
from .metadata import VERSION, SUMMARY
import argparse
PARSER = argparse.ArgumentParser(description=SUMMARY)
PARSER.add_argument('-V', '--version', action='version', version=VERSION,
help='Show version')
PARSER.add_argument('-s', '--short-break', action='store_const', const=5,
dest='minutes', default=25,
help='Time for a Pomodoro system short break')
PARSER.add_argument('-l', '--long-break', action='store_const',
const=15, dest='minutes',
help='Time for a Pomodoro system long break')
PARSER.add_argument('-p', '--pomodoro', action='store_const',
const=25, dest='minutes',
help='Time for a Pomodoro system single Pomodoro')
PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes',
help='Timer length, in minutes')
PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE,
help='Message to display in the notifier')
def parse_args(args):
settings = PARSER.parse_args(args)
return BongSettings(time=60*settings.minutes, message=settings.message)
## Instruction:
Clean up the argument parsing
## Code After:
from .settings import BongSettings, DEFAULT_MESSAGE
from .metadata import VERSION, SUMMARY
import argparse
PARSER = argparse.ArgumentParser(description=SUMMARY)
PARSER.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(VERSION),
help='show version')
PARSER.add_argument('-s', '--short-break', action='store_const', const=5,
dest='minutes', default=25,
help='time for a Pomodoro system short break')
PARSER.add_argument('-l', '--long-break', action='store_const',
const=15, dest='minutes',
help='time for a Pomodoro system long break')
PARSER.add_argument('-p', '--pomodoro', action='store_const',
const=25, dest='minutes',
help='time for a Pomodoro system single Pomodoro')
PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes',
help='timer length, in minutes')
PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE,
help='message to display in the notifier')
def parse_args(args):
settings = PARSER.parse_args(args)
return BongSettings(time=60*settings.minutes, message=settings.message)
|
# ... existing code ...
import argparse
PARSER = argparse.ArgumentParser(description=SUMMARY)
PARSER.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(VERSION),
help='show version')
PARSER.add_argument('-s', '--short-break', action='store_const', const=5,
dest='minutes', default=25,
help='time for a Pomodoro system short break')
PARSER.add_argument('-l', '--long-break', action='store_const',
const=15, dest='minutes',
help='time for a Pomodoro system long break')
PARSER.add_argument('-p', '--pomodoro', action='store_const',
const=25, dest='minutes',
help='time for a Pomodoro system single Pomodoro')
PARSER.add_argument('-t', '--time', action='store', type=int, dest='minutes',
help='timer length, in minutes')
PARSER.add_argument('-m', '--message', default=DEFAULT_MESSAGE,
help='message to display in the notifier')
def parse_args(args):
# ... rest of the code ...
|
bd3e4c61a80488e3a6d237f77e55a3c86a45e7c0
|
src/services/org/opennms/netmgt/threshd/ThreshdTest.java
|
src/services/org/opennms/netmgt/threshd/ThreshdTest.java
|
package org.opennms.netmgt.threshd;
import org.opennms.netmgt.config.DatabaseConnectionFactory;
import org.opennms.netmgt.mock.MockDatabase;
import org.opennms.netmgt.mock.MockUtil;
import junit.framework.TestCase;
public class ThreshdTest extends TestCase {
public static void main(String[] args) {
junit.textui.TestRunner.run(ThreshdTest.class);
}
protected void setUp() throws Exception {
super.setUp();
MockUtil.setupLogging();
MockDatabase db = new MockDatabase();
DatabaseConnectionFactory.setInstance(db);
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void xtestthreshd() {
Threshd threshd = new Threshd();
threshd.init();
threshd.start();
threshd.stop();
}
}
|
package org.opennms.netmgt.threshd;
import org.opennms.netmgt.config.DatabaseConnectionFactory;
import org.opennms.netmgt.mock.MockDatabase;
import org.opennms.netmgt.mock.MockUtil;
import junit.framework.TestCase;
public class ThreshdTest extends TestCase {
public static void main(String[] args) {
junit.textui.TestRunner.run(ThreshdTest.class);
}
protected void setUp() throws Exception {
super.setUp();
MockUtil.setupLogging();
MockDatabase db = new MockDatabase();
DatabaseConnectionFactory.setInstance(db);
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void xtestthreshd() {
Threshd threshd = new Threshd();
threshd.init();
threshd.start();
threshd.stop();
}
public void testDoNothing() {
}
}
|
Add empty tests to the build doesn't complain
|
Add empty tests to the build doesn't complain
|
Java
|
agpl-3.0
|
rdkgit/opennms,tdefilip/opennms,aihua/opennms,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,aihua/opennms,aihua/opennms,tdefilip/opennms,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,aihua/opennms,aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,tdefilip/opennms,aihua/opennms,roskens/opennms-pre-github,tdefilip/opennms,tdefilip/opennms,aihua/opennms,tdefilip/opennms,tdefilip/opennms,rdkgit/opennms,aihua/opennms,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,aihua/opennms,rdkgit/opennms,rdkgit/opennms
|
java
|
## Code Before:
package org.opennms.netmgt.threshd;
import org.opennms.netmgt.config.DatabaseConnectionFactory;
import org.opennms.netmgt.mock.MockDatabase;
import org.opennms.netmgt.mock.MockUtil;
import junit.framework.TestCase;
public class ThreshdTest extends TestCase {
public static void main(String[] args) {
junit.textui.TestRunner.run(ThreshdTest.class);
}
protected void setUp() throws Exception {
super.setUp();
MockUtil.setupLogging();
MockDatabase db = new MockDatabase();
DatabaseConnectionFactory.setInstance(db);
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void xtestthreshd() {
Threshd threshd = new Threshd();
threshd.init();
threshd.start();
threshd.stop();
}
}
## Instruction:
Add empty tests to the build doesn't complain
## Code After:
package org.opennms.netmgt.threshd;
import org.opennms.netmgt.config.DatabaseConnectionFactory;
import org.opennms.netmgt.mock.MockDatabase;
import org.opennms.netmgt.mock.MockUtil;
import junit.framework.TestCase;
public class ThreshdTest extends TestCase {
public static void main(String[] args) {
junit.textui.TestRunner.run(ThreshdTest.class);
}
protected void setUp() throws Exception {
super.setUp();
MockUtil.setupLogging();
MockDatabase db = new MockDatabase();
DatabaseConnectionFactory.setInstance(db);
}
protected void tearDown() throws Exception {
super.tearDown();
}
public void xtestthreshd() {
Threshd threshd = new Threshd();
threshd.init();
threshd.start();
threshd.stop();
}
public void testDoNothing() {
}
}
|
...
threshd.start();
threshd.stop();
}
public void testDoNothing() {
}
}
...
|
9bfde4878c40397646f5bc92d5c9de7137bd1a85
|
library/src/main/java/com/novoda/downloadmanager/DownloadBatchSizeCalculator.java
|
library/src/main/java/com/novoda/downloadmanager/DownloadBatchSizeCalculator.java
|
package com.novoda.downloadmanager;
import android.support.annotation.WorkerThread;
import java.util.List;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.*;
final class DownloadBatchSizeCalculator {
private DownloadBatchSizeCalculator() {
// non instantiable
}
@WorkerThread
static long getTotalSize(List<DownloadFile> downloadFiles, DownloadBatchStatus.Status status, DownloadBatchId downloadBatchId) {
long totalBatchSize = 0;
for (DownloadFile downloadFile : downloadFiles) {
if (status == DELETING || status == DELETED || status == PAUSED) {
Logger.w("abort getTotalSize file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
long totalFileSize = downloadFile.getTotalSize();
if (totalFileSize == 0) {
Logger.w("file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
totalBatchSize += totalFileSize;
}
return totalBatchSize;
}
}
|
package com.novoda.downloadmanager;
import android.support.annotation.WorkerThread;
import java.util.List;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.DELETING;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.DELETED;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.PAUSED;
final class DownloadBatchSizeCalculator {
private DownloadBatchSizeCalculator() {
// non instantiable
}
@WorkerThread
static long getTotalSize(List<DownloadFile> downloadFiles, DownloadBatchStatus.Status status, DownloadBatchId downloadBatchId) {
long totalBatchSize = 0;
for (DownloadFile downloadFile : downloadFiles) {
if (status == DELETING || status == DELETED || status == PAUSED) {
Logger.w("abort getTotalSize file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
long totalFileSize = downloadFile.getTotalSize();
if (totalFileSize == 0) {
Logger.w("file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
totalBatchSize += totalFileSize;
}
return totalBatchSize;
}
}
|
Replace star inport with explicit ones
|
Replace star inport with explicit ones
|
Java
|
apache-2.0
|
novoda/download-manager
|
java
|
## Code Before:
package com.novoda.downloadmanager;
import android.support.annotation.WorkerThread;
import java.util.List;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.*;
final class DownloadBatchSizeCalculator {
private DownloadBatchSizeCalculator() {
// non instantiable
}
@WorkerThread
static long getTotalSize(List<DownloadFile> downloadFiles, DownloadBatchStatus.Status status, DownloadBatchId downloadBatchId) {
long totalBatchSize = 0;
for (DownloadFile downloadFile : downloadFiles) {
if (status == DELETING || status == DELETED || status == PAUSED) {
Logger.w("abort getTotalSize file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
long totalFileSize = downloadFile.getTotalSize();
if (totalFileSize == 0) {
Logger.w("file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
totalBatchSize += totalFileSize;
}
return totalBatchSize;
}
}
## Instruction:
Replace star inport with explicit ones
## Code After:
package com.novoda.downloadmanager;
import android.support.annotation.WorkerThread;
import java.util.List;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.DELETING;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.DELETED;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.PAUSED;
final class DownloadBatchSizeCalculator {
private DownloadBatchSizeCalculator() {
// non instantiable
}
@WorkerThread
static long getTotalSize(List<DownloadFile> downloadFiles, DownloadBatchStatus.Status status, DownloadBatchId downloadBatchId) {
long totalBatchSize = 0;
for (DownloadFile downloadFile : downloadFiles) {
if (status == DELETING || status == DELETED || status == PAUSED) {
Logger.w("abort getTotalSize file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
long totalFileSize = downloadFile.getTotalSize();
if (totalFileSize == 0) {
Logger.w("file " + downloadFile.id().rawId()
+ " from batch " + downloadBatchId.rawId()
+ " with status " + status
+ " returns 0 as totalFileSize");
return 0;
}
totalBatchSize += totalFileSize;
}
return totalBatchSize;
}
}
|
// ... existing code ...
import java.util.List;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.DELETING;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.DELETED;
import static com.novoda.downloadmanager.DownloadBatchStatus.Status.PAUSED;
final class DownloadBatchSizeCalculator {
// ... rest of the code ...
|
03875be96ba09a252a05ba0ea84a3558c6428011
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
reqs_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, "requirements.txt")
reqs = None
with open(reqs_file) as f:
reqs = f.readlines()
setup(
version='0.1.0',
name='mediachain-client',
description='mediachain reader command line interface',
author='Mediachain Labs',
packages=find_packages('.'),
entry_points={
'console_scripts': [
'mediachain = mediachain.cli.main:main'
]
},
url='http://mediachain.io',
install_requires=reqs,
)
|
import os, sys
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
reqs_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, "requirements.txt")
reqs = None
with open(reqs_file) as f:
reqs = f.readlines()
def _pre_install(dir):
from subprocess import check_call
check_call(['scripts/build_grpc.sh'],
cwd=dir)
class install(_install):
def run(self):
self.execute(_pre_install, [os.path.dirname(__file__)],
msg="Generating protobuf")
_install.run(self)
setup(
version='0.1.0',
name='mediachain-client',
description='mediachain reader command line interface',
author='Mediachain Labs',
packages=find_packages('.'),
entry_points={
'console_scripts': [
'mediachain = mediachain.cli.main:main'
]
},
url='http://mediachain.io',
install_requires=reqs,
cmdclass={'install': install},
)
|
Add pre-install protobuf gen script
|
Add pre-install protobuf gen script
|
Python
|
mit
|
mediachain/mediachain-client,mediachain/mediachain-client
|
python
|
## Code Before:
import os
from setuptools import setup, find_packages
reqs_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, "requirements.txt")
reqs = None
with open(reqs_file) as f:
reqs = f.readlines()
setup(
version='0.1.0',
name='mediachain-client',
description='mediachain reader command line interface',
author='Mediachain Labs',
packages=find_packages('.'),
entry_points={
'console_scripts': [
'mediachain = mediachain.cli.main:main'
]
},
url='http://mediachain.io',
install_requires=reqs,
)
## Instruction:
Add pre-install protobuf gen script
## Code After:
import os, sys
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
reqs_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, "requirements.txt")
reqs = None
with open(reqs_file) as f:
reqs = f.readlines()
def _pre_install(dir):
from subprocess import check_call
check_call(['scripts/build_grpc.sh'],
cwd=dir)
class install(_install):
def run(self):
self.execute(_pre_install, [os.path.dirname(__file__)],
msg="Generating protobuf")
_install.run(self)
setup(
version='0.1.0',
name='mediachain-client',
description='mediachain reader command line interface',
author='Mediachain Labs',
packages=find_packages('.'),
entry_points={
'console_scripts': [
'mediachain = mediachain.cli.main:main'
]
},
url='http://mediachain.io',
install_requires=reqs,
cmdclass={'install': install},
)
|
...
import os, sys
from setuptools import setup, find_packages
from setuptools.command.install import install as _install
reqs_file = os.path.join(os.path.dirname(os.path.realpath(__file__))
, "requirements.txt")
...
reqs = None
with open(reqs_file) as f:
reqs = f.readlines()
def _pre_install(dir):
from subprocess import check_call
check_call(['scripts/build_grpc.sh'],
cwd=dir)
class install(_install):
def run(self):
self.execute(_pre_install, [os.path.dirname(__file__)],
msg="Generating protobuf")
_install.run(self)
setup(
version='0.1.0',
...
},
url='http://mediachain.io',
install_requires=reqs,
cmdclass={'install': install},
)
...
|
7f4c216b9996393dfb5390367a669db8431a6b17
|
whistlepost-app/src/main/java/org/mnode/whistlepost/model/Article.java
|
whistlepost-app/src/main/java/org/mnode/whistlepost/model/Article.java
|
package org.mnode.whistlepost.model;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.models.annotations.Model;
@Model(adaptables = {Resource.class, SlingHttpServletRequest.class})
public class Article extends Page {
public String getSummary() {
if (getPars().length > 0) {
// remove all markup from summary..
return getPars()[0].replaceAll("<\\w.*>", "");
} else {
return "";
}
}
public String[] getPars() {
return resource.getValueMap().get("pars", new String[] {});
}
public String getPublishedDate() {
return resource.getValueMap().get("date", String.class);
}
public String getMedia() {
return resource.getValueMap().get("media", String.class);
}
}
|
package org.mnode.whistlepost.model;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.models.annotations.Model;
import org.apache.sling.models.annotations.injectorspecific.SlingObject;
import java.util.Arrays;
import java.util.stream.Collectors;
@Model(adaptables = {Resource.class, SlingHttpServletRequest.class})
public class Article extends Page {
@SlingObject
protected ResourceResolver resourceResolver;
public String getSummary() {
if (getPars().length > 0) {
// remove all markup from summary..
return getPars()[0].replaceAll("<\\w.*>", "");
} else {
return "";
}
}
public String[] getPars() {
return resource.getValueMap().get("pars", new String[] {});
}
public String getPublishedDate() {
return resource.getValueMap().get("date", String.class);
}
public String getMedia() {
return resource.getValueMap().get("media", String.class);
}
public Article getSidebar() {
Resource sidebarResource = null;
String sidebar = resource.getValueMap().get("sidebar", String.class);
if (sidebar != null) {
if (sidebar.startsWith("/")) {
sidebarResource = resourceResolver.getResource(sidebar);
} else {
sidebarResource = resourceResolver.getResource(resource.getParent(), sidebar);
}
}
if (sidebarResource != null) {
return sidebarResource.adaptTo(Article.class);
} else {
return null;
}
}
public Iterable<Article> getRelated() {
String[] related = resource.getValueMap().get("related", new String[] {});
return Arrays.stream(related).map(p -> resourceResolver.getResource(resource.getParent(), p).adaptTo(Article.class))
.collect(Collectors.toList());
}
}
|
Support sidebar and related articles
|
Support sidebar and related articles
|
Java
|
apache-2.0
|
micronode/whistlepost,micronode/whistlepost,micronode/whistlepost,micronode/whistlepost
|
java
|
## Code Before:
package org.mnode.whistlepost.model;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.models.annotations.Model;
@Model(adaptables = {Resource.class, SlingHttpServletRequest.class})
public class Article extends Page {
public String getSummary() {
if (getPars().length > 0) {
// remove all markup from summary..
return getPars()[0].replaceAll("<\\w.*>", "");
} else {
return "";
}
}
public String[] getPars() {
return resource.getValueMap().get("pars", new String[] {});
}
public String getPublishedDate() {
return resource.getValueMap().get("date", String.class);
}
public String getMedia() {
return resource.getValueMap().get("media", String.class);
}
}
## Instruction:
Support sidebar and related articles
## Code After:
package org.mnode.whistlepost.model;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.models.annotations.Model;
import org.apache.sling.models.annotations.injectorspecific.SlingObject;
import java.util.Arrays;
import java.util.stream.Collectors;
@Model(adaptables = {Resource.class, SlingHttpServletRequest.class})
public class Article extends Page {
@SlingObject
protected ResourceResolver resourceResolver;
public String getSummary() {
if (getPars().length > 0) {
// remove all markup from summary..
return getPars()[0].replaceAll("<\\w.*>", "");
} else {
return "";
}
}
public String[] getPars() {
return resource.getValueMap().get("pars", new String[] {});
}
public String getPublishedDate() {
return resource.getValueMap().get("date", String.class);
}
public String getMedia() {
return resource.getValueMap().get("media", String.class);
}
public Article getSidebar() {
Resource sidebarResource = null;
String sidebar = resource.getValueMap().get("sidebar", String.class);
if (sidebar != null) {
if (sidebar.startsWith("/")) {
sidebarResource = resourceResolver.getResource(sidebar);
} else {
sidebarResource = resourceResolver.getResource(resource.getParent(), sidebar);
}
}
if (sidebarResource != null) {
return sidebarResource.adaptTo(Article.class);
} else {
return null;
}
}
public Iterable<Article> getRelated() {
String[] related = resource.getValueMap().get("related", new String[] {});
return Arrays.stream(related).map(p -> resourceResolver.getResource(resource.getParent(), p).adaptTo(Article.class))
.collect(Collectors.toList());
}
}
|
...
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.models.annotations.Model;
import org.apache.sling.models.annotations.injectorspecific.SlingObject;
import java.util.Arrays;
import java.util.stream.Collectors;
@Model(adaptables = {Resource.class, SlingHttpServletRequest.class})
public class Article extends Page {
@SlingObject
protected ResourceResolver resourceResolver;
public String getSummary() {
if (getPars().length > 0) {
...
public String getMedia() {
return resource.getValueMap().get("media", String.class);
}
public Article getSidebar() {
Resource sidebarResource = null;
String sidebar = resource.getValueMap().get("sidebar", String.class);
if (sidebar != null) {
if (sidebar.startsWith("/")) {
sidebarResource = resourceResolver.getResource(sidebar);
} else {
sidebarResource = resourceResolver.getResource(resource.getParent(), sidebar);
}
}
if (sidebarResource != null) {
return sidebarResource.adaptTo(Article.class);
} else {
return null;
}
}
public Iterable<Article> getRelated() {
String[] related = resource.getValueMap().get("related", new String[] {});
return Arrays.stream(related).map(p -> resourceResolver.getResource(resource.getParent(), p).adaptTo(Article.class))
.collect(Collectors.toList());
}
}
...
|
894fb1d68e82679720ed0acb71d478a8a1ba525d
|
openchordcharts/views/api.py
|
openchordcharts/views/api.py
|
from pyramid.view import view_config
from openchordcharts import model
@view_config(route_name='charts.json', renderer='jsonp')
def charts_json(request):
return [chart.to_json() for chart in model.Chart.find()]
|
from pyramid.view import view_config
from openchordcharts import model
@view_config(route_name='charts.json', renderer='jsonp')
def charts_json(request):
title = request.GET.get('title')
user = request.GET.get('user')
spec = {}
if title:
spec['title'] = title
if user:
spec['user'] = user
return [chart.to_json() for chart in model.Chart.find(spec)]
|
Add search by title and user for API.
|
Add search by title and user for API.
|
Python
|
agpl-3.0
|
openchordcharts/web-api,openchordcharts/openchordcharts-api
|
python
|
## Code Before:
from pyramid.view import view_config
from openchordcharts import model
@view_config(route_name='charts.json', renderer='jsonp')
def charts_json(request):
return [chart.to_json() for chart in model.Chart.find()]
## Instruction:
Add search by title and user for API.
## Code After:
from pyramid.view import view_config
from openchordcharts import model
@view_config(route_name='charts.json', renderer='jsonp')
def charts_json(request):
title = request.GET.get('title')
user = request.GET.get('user')
spec = {}
if title:
spec['title'] = title
if user:
spec['user'] = user
return [chart.to_json() for chart in model.Chart.find(spec)]
|
...
@view_config(route_name='charts.json', renderer='jsonp')
def charts_json(request):
title = request.GET.get('title')
user = request.GET.get('user')
spec = {}
if title:
spec['title'] = title
if user:
spec['user'] = user
return [chart.to_json() for chart in model.Chart.find(spec)]
...
|
a62d038885dcf0b97c544f3b091f2bfba7cc23d7
|
kitsune/sumo/widgets.py
|
kitsune/sumo/widgets.py
|
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
Add required renderer argument to Widget.render() call
|
Add required renderer argument to Widget.render() call
mozilla/sumo-project#136
|
Python
|
bsd-3-clause
|
mozilla/kitsune,mozilla/kitsune,mozilla/kitsune,mozilla/kitsune
|
python
|
## Code Before:
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = super(ImageWidget, self).render(name, value, attrs)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
## Instruction:
Add required renderer argument to Widget.render() call
mozilla/sumo-project#136
## Code After:
from django import forms
class ImageWidget(forms.FileInput):
"""
A ImageField Widget that shows a thumbnail.
"""
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
return output
|
...
def __init__(self, attrs={}):
super(ImageWidget, self).__init__(attrs)
def render(self, name, value, attrs=None, renderer=None):
output = super(ImageWidget, self).render(name, value, attrs, renderer=renderer)
if value and hasattr(value, 'url'):
output = ('<div class="val-wrap"><img src="%s" alt="" />%s</div>' %
(value.url, output))
...
|
161cdf644aa9b8575f42dab537c5e3e01a186ec6
|
test/python_api/default-constructor/sb_address.py
|
test/python_api/default-constructor/sb_address.py
|
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.GetSectionType()
obj.GetSymbolContext(lldb.eSymbolContextEverything)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetBlock()
obj.GetSymbol()
obj.GetLineEntry()
obj.Clear()
|
Add new SBAddress APIs to the fuzz tests.
|
Add new SBAddress APIs to the fuzz tests.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@137625 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
python
|
## Code Before:
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.Clear()
## Instruction:
Add new SBAddress APIs to the fuzz tests.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@137625 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
import sys
import lldb
def fuzz_obj(obj):
obj.GetFileAddress()
obj.GetLoadAddress(lldb.SBTarget())
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.GetSectionType()
obj.GetSymbolContext(lldb.eSymbolContextEverything)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetBlock()
obj.GetSymbol()
obj.GetLineEntry()
obj.Clear()
|
...
obj.SetLoadAddress(0xffff, lldb.SBTarget())
obj.OffsetAddress(sys.maxint)
obj.GetDescription(lldb.SBStream())
obj.GetSectionType()
obj.GetSymbolContext(lldb.eSymbolContextEverything)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetBlock()
obj.GetSymbol()
obj.GetLineEntry()
obj.Clear()
...
|
778070f1791de57bc51dd84b43278091bc4850fc
|
config-provisioning/src/main/java/com/yahoo/config/provision/NodeType.java
|
config-provisioning/src/main/java/com/yahoo/config/provision/NodeType.java
|
package com.yahoo.config.provision;
/**
* The possible types of nodes in the node repository
*
* @author bratseth
*/
public enum NodeType {
/** A host of a set of (docker) tenant nodes */
host,
/** Nodes running the shared proxy layer */
proxy,
/** A node to be assigned to a tenant to run application workloads */
tenant
}
|
package com.yahoo.config.provision;
/**
* The possible types of nodes in the node repository
*
* @author bratseth
*/
public enum NodeType {
/** A host of a set of (docker) tenant nodes */
host,
/** Nodes running the shared proxy layer */
proxy,
/** A node to be assigned to a tenant to run application workloads */
tenant,
/** A config server */
config
}
|
Add node type for config server
|
Add node type for config server
|
Java
|
apache-2.0
|
vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa
|
java
|
## Code Before:
package com.yahoo.config.provision;
/**
* The possible types of nodes in the node repository
*
* @author bratseth
*/
public enum NodeType {
/** A host of a set of (docker) tenant nodes */
host,
/** Nodes running the shared proxy layer */
proxy,
/** A node to be assigned to a tenant to run application workloads */
tenant
}
## Instruction:
Add node type for config server
## Code After:
package com.yahoo.config.provision;
/**
* The possible types of nodes in the node repository
*
* @author bratseth
*/
public enum NodeType {
/** A host of a set of (docker) tenant nodes */
host,
/** Nodes running the shared proxy layer */
proxy,
/** A node to be assigned to a tenant to run application workloads */
tenant,
/** A config server */
config
}
|
// ... existing code ...
proxy,
/** A node to be assigned to a tenant to run application workloads */
tenant,
/** A config server */
config
}
// ... rest of the code ...
|
fb9591c4a2801bfe5f5380c3e33aa44a25db3591
|
customforms/models.py
|
customforms/models.py
|
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.form.id)])
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
Add absolute URLs to form and question admin
|
Add absolute URLs to form and question admin
|
Python
|
apache-2.0
|
cschwede/django-customforms
|
python
|
## Code Before:
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
## Instruction:
Add absolute URLs to form and question admin
## Code After:
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.form.id)])
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
...
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
...
class Meta:
ordering = ('title', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
...
class Meta:
ordering = ('form', 'position', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.form.id)])
class Choice(models.Model):
question = models.ForeignKey(Question)
...
|
35aca2dc94b129995db292352d7f5e79f05bde0c
|
Lib/test/crashers/compiler_recursion.py
|
Lib/test/crashers/compiler_recursion.py
|
compile('()'*9**5, '?', 'exec')
|
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it
|
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
python
|
## Code Before:
compile('()'*9**5, '?', 'exec')
## Instruction:
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it
## Code After:
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
// ... existing code ...
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
// ... rest of the code ...
|
2726404284fbae6388dbf40e01b6ad5ccf1c56a2
|
knights/compiler.py
|
knights/compiler.py
|
import ast
from .context import ContextScope
from .parser import Parser
from .utils import Helpers
def kompile(src, raw=False, filename='<compiler>'):
'''
Creates a new class based on the supplied template, and returnsit.
class Template(object):
def __call__(self, context):
return ''.join(self._iterator(context))
def _iterator(self, context):
return map(str, self._root(context)
def _root(self, context):
yield ''
yield ...
yield from self.head(context)
Blocks create new methods, and add a 'yield from self.{block}(context)' to
the current function
'''
parser = Parser(src)
parser.load_library('knights.tags')
parser.load_library('knights.helpers')
parser.build_method('_root')
if parser.parent:
# Remove _root from the method list
parser.methods = [
method for method in parser.methods if method.name != '_root'
]
klass = parser.build_class()
# Wrap it in a module
inst = ast.Module(body=[klass])
ast.fix_missing_locations(inst)
# Compile code to create class
code = compile(inst, filename=filename, mode='exec', optimize=2)
# Execute it and return the instance
g = {
'_': Helpers(parser.helpers),
'parent': parser.parent,
'ContextScope': ContextScope,
}
eval(code, g)
klass = g['Template']
if raw:
return klass
return klass()
|
import ast
from .context import ContextScope
from .parser import Parser
from .utils import Helpers
def kompile(src, raw=False, filename='<compiler>', **kwargs):
'''
Creates a new class based on the supplied template, and returnsit.
class Template(object):
def __call__(self, context):
return ''.join(self._iterator(context))
def _iterator(self, context):
return map(str, self._root(context)
def _root(self, context):
yield ''
yield ...
yield from self.head(context)
Blocks create new methods, and add a 'yield from self.{block}(context)' to
the current function
'''
parser = Parser(src)
parser.load_library('knights.tags')
parser.load_library('knights.helpers')
parser.build_method('_root')
if parser.parent:
# Remove _root from the method list
parser.methods = [
method for method in parser.methods if method.name != '_root'
]
klass = parser.build_class()
# Wrap it in a module
inst = ast.Module(body=[klass])
ast.fix_missing_locations(inst)
if kwargs.get('astor', False):
import astor
print(astor.to_source(inst))
# Compile code to create class
code = compile(inst, filename=filename, mode='exec', optimize=2)
# Execute it and return the instance
g = {
'_': Helpers(parser.helpers),
'parent': parser.parent,
'ContextScope': ContextScope,
}
eval(code, g)
klass = g['Template']
if raw:
return klass
return klass()
|
Add option to print astor reconstructed source of template
|
Add option to print astor reconstructed source of template
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
python
|
## Code Before:
import ast
from .context import ContextScope
from .parser import Parser
from .utils import Helpers
def kompile(src, raw=False, filename='<compiler>'):
'''
Creates a new class based on the supplied template, and returnsit.
class Template(object):
def __call__(self, context):
return ''.join(self._iterator(context))
def _iterator(self, context):
return map(str, self._root(context)
def _root(self, context):
yield ''
yield ...
yield from self.head(context)
Blocks create new methods, and add a 'yield from self.{block}(context)' to
the current function
'''
parser = Parser(src)
parser.load_library('knights.tags')
parser.load_library('knights.helpers')
parser.build_method('_root')
if parser.parent:
# Remove _root from the method list
parser.methods = [
method for method in parser.methods if method.name != '_root'
]
klass = parser.build_class()
# Wrap it in a module
inst = ast.Module(body=[klass])
ast.fix_missing_locations(inst)
# Compile code to create class
code = compile(inst, filename=filename, mode='exec', optimize=2)
# Execute it and return the instance
g = {
'_': Helpers(parser.helpers),
'parent': parser.parent,
'ContextScope': ContextScope,
}
eval(code, g)
klass = g['Template']
if raw:
return klass
return klass()
## Instruction:
Add option to print astor reconstructed source of template
## Code After:
import ast
from .context import ContextScope
from .parser import Parser
from .utils import Helpers
def kompile(src, raw=False, filename='<compiler>', **kwargs):
'''
Creates a new class based on the supplied template, and returnsit.
class Template(object):
def __call__(self, context):
return ''.join(self._iterator(context))
def _iterator(self, context):
return map(str, self._root(context)
def _root(self, context):
yield ''
yield ...
yield from self.head(context)
Blocks create new methods, and add a 'yield from self.{block}(context)' to
the current function
'''
parser = Parser(src)
parser.load_library('knights.tags')
parser.load_library('knights.helpers')
parser.build_method('_root')
if parser.parent:
# Remove _root from the method list
parser.methods = [
method for method in parser.methods if method.name != '_root'
]
klass = parser.build_class()
# Wrap it in a module
inst = ast.Module(body=[klass])
ast.fix_missing_locations(inst)
if kwargs.get('astor', False):
import astor
print(astor.to_source(inst))
# Compile code to create class
code = compile(inst, filename=filename, mode='exec', optimize=2)
# Execute it and return the instance
g = {
'_': Helpers(parser.helpers),
'parent': parser.parent,
'ContextScope': ContextScope,
}
eval(code, g)
klass = g['Template']
if raw:
return klass
return klass()
|
# ... existing code ...
from .utils import Helpers
def kompile(src, raw=False, filename='<compiler>', **kwargs):
'''
Creates a new class based on the supplied template, and returnsit.
# ... modified code ...
ast.fix_missing_locations(inst)
if kwargs.get('astor', False):
import astor
print(astor.to_source(inst))
# Compile code to create class
code = compile(inst, filename=filename, mode='exec', optimize=2)
# ... rest of the code ...
|
6d888061089648f2363f77f48fb7458a7ff16735
|
pyportify/serializers.py
|
pyportify/serializers.py
|
class Track():
artist = ""
name = ""
track_id = ""
def __init__(self, artist, name, track_id=""):
self.artist = artist
self.name = name
self.track_id = track_id
@staticmethod
def from_spotify(self, track):
track_id = track.get("id")
name = track.get("name")
artist = ""
if "artists" in track:
artist = track["artists"][0]["name"]
return Track(artist, name, track_id)
@staticmethod
def from_gpm(self, track):
return Track(
track.get("artist"),
track.get("title"),
track.get("storeId")
)
|
class Track():
artist = ""
name = ""
track_id = ""
def __init__(self, artist, name, track_id=""):
self.artist = artist
self.name = name
self.track_id = track_id
@classmethod
def from_spotify(cls, track):
track_id = track.get("id")
name = track.get("name")
artist = ""
if "artists" in track:
artist = track["artists"][0]["name"]
return cls(artist, name, track_id)
@classmethod
def from_gpm(cls, track):
return cls(
track.get("artist"),
track.get("title"),
track.get("storeId")
)
|
Change from_spotify and from_gpm to classmethods
|
Change from_spotify and from_gpm to classmethods
|
Python
|
apache-2.0
|
rckclmbr/pyportify,rckclmbr/pyportify,rckclmbr/pyportify,rckclmbr/pyportify
|
python
|
## Code Before:
class Track():
artist = ""
name = ""
track_id = ""
def __init__(self, artist, name, track_id=""):
self.artist = artist
self.name = name
self.track_id = track_id
@staticmethod
def from_spotify(self, track):
track_id = track.get("id")
name = track.get("name")
artist = ""
if "artists" in track:
artist = track["artists"][0]["name"]
return Track(artist, name, track_id)
@staticmethod
def from_gpm(self, track):
return Track(
track.get("artist"),
track.get("title"),
track.get("storeId")
)
## Instruction:
Change from_spotify and from_gpm to classmethods
## Code After:
class Track():
artist = ""
name = ""
track_id = ""
def __init__(self, artist, name, track_id=""):
self.artist = artist
self.name = name
self.track_id = track_id
@classmethod
def from_spotify(cls, track):
track_id = track.get("id")
name = track.get("name")
artist = ""
if "artists" in track:
artist = track["artists"][0]["name"]
return cls(artist, name, track_id)
@classmethod
def from_gpm(cls, track):
return cls(
track.get("artist"),
track.get("title"),
track.get("storeId")
)
|
// ... existing code ...
self.name = name
self.track_id = track_id
@classmethod
def from_spotify(cls, track):
track_id = track.get("id")
name = track.get("name")
artist = ""
// ... modified code ...
if "artists" in track:
artist = track["artists"][0]["name"]
return cls(artist, name, track_id)
@classmethod
def from_gpm(cls, track):
return cls(
track.get("artist"),
track.get("title"),
track.get("storeId")
// ... rest of the code ...
|
c143bc14be8d486d313056c0d1313e03ac438284
|
examples/ex_aps_parser.py
|
examples/ex_aps_parser.py
|
from __future__ import print_function
import os
import glob
import pyingest.parsers.aps as aps
import pyingest.parsers.arxiv as arxiv
import pyingest.serializers.classic
import traceback
import json
import xmltodict
from datetime import datetime
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
with open(input_list,'rU') as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.','/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ",fnord)
with open(f, 'rU') as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
outputfp = open('aps.tag', 'a')
serializer.write(document, outputfp)
outputfp.close()
#except:
# print "ERROR!\n%s\n"%f
# traceback.print_exc()
# pass
else:
print("not found :( ", fnord)
|
from __future__ import print_function
import os
import glob
import pyingest.parsers.aps as aps
import pyingest.parsers.arxiv as arxiv
import pyingest.serializers.classic
import traceback
import json
import xmltodict
from datetime import datetime
import sys
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
if sys.version_info > (3,):
open_mode = 'r'
else:
open_mode = 'rU'
with open(input_list, open_mode) as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.', '/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ", fnord)
with open(f, open_mode) as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
outputfp = open('aps.tag', 'a')
serializer.write(document, outputfp)
outputfp.close()
#except:
# print "ERROR!\n%s\n"%f
# traceback.print_exc()
# pass
else:
print("not found :( ", fnord)
|
Use open mode syntax on example file
|
Use open mode syntax on example file
|
Python
|
mit
|
adsabs/adsabs-pyingest,adsabs/adsabs-pyingest,adsabs/adsabs-pyingest
|
python
|
## Code Before:
from __future__ import print_function
import os
import glob
import pyingest.parsers.aps as aps
import pyingest.parsers.arxiv as arxiv
import pyingest.serializers.classic
import traceback
import json
import xmltodict
from datetime import datetime
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
with open(input_list,'rU') as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.','/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ",fnord)
with open(f, 'rU') as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
outputfp = open('aps.tag', 'a')
serializer.write(document, outputfp)
outputfp.close()
#except:
# print "ERROR!\n%s\n"%f
# traceback.print_exc()
# pass
else:
print("not found :( ", fnord)
## Instruction:
Use open mode syntax on example file
## Code After:
from __future__ import print_function
import os
import glob
import pyingest.parsers.aps as aps
import pyingest.parsers.arxiv as arxiv
import pyingest.serializers.classic
import traceback
import json
import xmltodict
from datetime import datetime
import sys
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
if sys.version_info > (3,):
open_mode = 'r'
else:
open_mode = 'rU'
with open(input_list, open_mode) as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.', '/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ", fnord)
with open(f, open_mode) as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
outputfp = open('aps.tag', 'a')
serializer.write(document, outputfp)
outputfp.close()
#except:
# print "ERROR!\n%s\n"%f
# traceback.print_exc()
# pass
else:
print("not found :( ", fnord)
|
// ... existing code ...
import json
import xmltodict
from datetime import datetime
import sys
input_list = 'bibc.2.out'
testfile=[]
xmldir = '/proj/ads/fulltext/sources/downloads/cache/APS_HARVEST/harvest.aps.org/v2/journals/articles/'
xmltail = '/fulltext.xml'
if sys.version_info > (3,):
open_mode = 'r'
else:
open_mode = 'rU'
with open(input_list, open_mode) as fi:
for l in fi.readlines():
doi = l.strip().split('\t')[1]
(a,b) = doi.split('/')
b = b.replace('.', '/')
infile = xmldir + a + '/' + b + xmltail
testfile.append(infile)
// ... modified code ...
for f in testfile:
fnord = f[92:]
if os.path.isfile(f):
print("found! ", fnord)
with open(f, open_mode) as fp:
parser = aps.APSJATSParser()
document = parser.parse(fp)
serializer = pyingest.serializers.classic.Tagged()
// ... rest of the code ...
|
21f7d85d5f22834e04a25ea23eabfd07b279bfe6
|
openedx/features/badging/constants.py
|
openedx/features/badging/constants.py
|
CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
|
CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
|
Add constant for badge url
|
Add constant for badge url
|
Python
|
agpl-3.0
|
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
|
python
|
## Code Before:
CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
## Instruction:
Add constant for badge url
## Code After:
CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
|
// ... existing code ...
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
// ... rest of the code ...
|
4c484a29480ec9d85a87ac7c2aaf09ced7d15457
|
nn/file/__init__.py
|
nn/file/__init__.py
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
|
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
Monitor number of batches in an input batch queue
|
Monitor number of batches in an input batch queue
|
Python
|
unlicense
|
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
|
python
|
## Code Before:
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from ..flags import FLAGS
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
def read_files(file_pattern, file_format):
return READERS[file_format](_file_pattern_to_names(file_pattern))
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
## Instruction:
Monitor number of batches in an input batch queue
## Code After:
import functools
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
|
...
import tensorflow as tf
from . import cnn_dailymail_rc
from .. import collections
from ..flags import FLAGS
from ..util import func_scope
...
READERS = { "cnn_dailymail_rc": cnn_dailymail_rc.read_files }
@func_scope
def read_files(file_pattern, file_format):
return monitored_batch_queue(
*READERS[file_format](_file_pattern_to_names(file_pattern)))
@func_scope
def _file_pattern_to_names(pattern):
return tf.train.string_input_producer(tf.train.match_filenames_once(pattern),
num_epochs=FLAGS.num_epochs,
capacity=FLAGS.filename_queue_capacity)
@func_scope
def monitored_batch_queue(*tensors):
queue = tf.FIFOQueue(FLAGS.batch_queue_capacity,
[tensor.dtype for tensor in tensors])
collections.add_metric(queue.size(), "batches_in_queue")
tf.train.add_queue_runner(
tf.train.QueueRunner(queue, [queue.enqueue(tensors)]))
results = queue.dequeue()
for tensor, result in zip(tensors, results):
result.set_shape(tensor.get_shape())
return results
...
|
ffdf13c8217f3a785fe8768697b3e3da4b6ff9cb
|
cherrypy/py3util.py
|
cherrypy/py3util.py
|
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
Use builtin sorted, reversed if available.
|
Use builtin sorted, reversed if available.
|
Python
|
bsd-3-clause
|
cherrypy/cheroot,Safihre/cherrypy,cherrypy/cherrypy,Safihre/cherrypy,cherrypy/cherrypy
|
python
|
## Code Before:
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
## Instruction:
Use builtin sorted, reversed if available.
## Code After:
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
...
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
...
|
a4f1fa704692894bcd568d02b23595e11910f791
|
apps/searchv2/tests/test_utils.py
|
apps/searchv2/tests/test_utils.py
|
from datetime import datetime
from django.test import TestCase
from package.tests import data, initial_data
from searchv2.utils import remove_prefix, clean_title
class UtilFunctionTest(TestCase):
def test_remove_prefix(self):
values = ["django-me","django.me","django/me","django_me"]
for value in values:
self.assertEqual(remove_prefix(value), "me")
def test_clean_title(self):
values = ["django-me","django.me","django/me","django_me"]
for value in values:
self.assertEqual(clean_title(value), "djangome")
|
from datetime import datetime
from django.conf import settings
from django.test import TestCase
from package.tests import data, initial_data
from searchv2.utils import remove_prefix, clean_title
class UtilFunctionTest(TestCase):
def setUp(self):
self.values = []
for value in ["-me",".me","/me","_me"]:
value = "{0}{1}".format(settings.PACKAGINATOR_SEARCH_PREFIX, value)
self.values.append(value)
def test_remove_prefix(self):
for value in self.values:
self.assertEqual(remove_prefix(value), "me")
def test_clean_title(self):
test_value = "{0}me".format(settings.PACKAGINATOR_SEARCH_PREFIX)
for value in self.values:
self.assertEqual(clean_title(value), test_value)
|
Fix to make site packages more generic in tests
|
Fix to make site packages more generic in tests
|
Python
|
mit
|
pydanny/djangopackages,audreyr/opencomparison,miketheman/opencomparison,nanuxbe/djangopackages,pydanny/djangopackages,QLGu/djangopackages,pydanny/djangopackages,miketheman/opencomparison,audreyr/opencomparison,benracine/opencomparison,nanuxbe/djangopackages,benracine/opencomparison,QLGu/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages
|
python
|
## Code Before:
from datetime import datetime
from django.test import TestCase
from package.tests import data, initial_data
from searchv2.utils import remove_prefix, clean_title
class UtilFunctionTest(TestCase):
def test_remove_prefix(self):
values = ["django-me","django.me","django/me","django_me"]
for value in values:
self.assertEqual(remove_prefix(value), "me")
def test_clean_title(self):
values = ["django-me","django.me","django/me","django_me"]
for value in values:
self.assertEqual(clean_title(value), "djangome")
## Instruction:
Fix to make site packages more generic in tests
## Code After:
from datetime import datetime
from django.conf import settings
from django.test import TestCase
from package.tests import data, initial_data
from searchv2.utils import remove_prefix, clean_title
class UtilFunctionTest(TestCase):
def setUp(self):
self.values = []
for value in ["-me",".me","/me","_me"]:
value = "{0}{1}".format(settings.PACKAGINATOR_SEARCH_PREFIX, value)
self.values.append(value)
def test_remove_prefix(self):
for value in self.values:
self.assertEqual(remove_prefix(value), "me")
def test_clean_title(self):
test_value = "{0}me".format(settings.PACKAGINATOR_SEARCH_PREFIX)
for value in self.values:
self.assertEqual(clean_title(value), test_value)
|
// ... existing code ...
from datetime import datetime
from django.conf import settings
from django.test import TestCase
from package.tests import data, initial_data
from searchv2.utils import remove_prefix, clean_title
class UtilFunctionTest(TestCase):
def setUp(self):
self.values = []
for value in ["-me",".me","/me","_me"]:
value = "{0}{1}".format(settings.PACKAGINATOR_SEARCH_PREFIX, value)
self.values.append(value)
def test_remove_prefix(self):
for value in self.values:
self.assertEqual(remove_prefix(value), "me")
def test_clean_title(self):
test_value = "{0}me".format(settings.PACKAGINATOR_SEARCH_PREFIX)
for value in self.values:
self.assertEqual(clean_title(value), test_value)
// ... rest of the code ...
|
d38f25a688692ed603dea1dc9fd5d89e14000e9e
|
subprojects/docs/src/snippets/dependencyManagement/managingTransitiveDependencies-dependencyAlignmentWithPlatform/kotlin/buildSrc/src/main/kotlin/myproject.java-library-conventions.gradle.kts
|
subprojects/docs/src/snippets/dependencyManagement/managingTransitiveDependencies-dependencyAlignmentWithPlatform/kotlin/buildSrc/src/main/kotlin/myproject.java-library-conventions.gradle.kts
|
plugins {
id("java-library")
id("myproject.publishing-conventions")
}
publishing {
publications {
create("maven", MavenPublication::class.java) {
from(components["java"])
}
}
}
|
plugins {
id("java-library")
id("myproject.publishing-conventions")
}
publishing {
publications {
create<MavenPublication>("maven") {
from(components["java"])
}
}
}
|
Use idiomatic Kotlin DSL in dependency alignment snippet
|
Use idiomatic Kotlin DSL in dependency alignment snippet
|
Kotlin
|
apache-2.0
|
gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle
|
kotlin
|
## Code Before:
plugins {
id("java-library")
id("myproject.publishing-conventions")
}
publishing {
publications {
create("maven", MavenPublication::class.java) {
from(components["java"])
}
}
}
## Instruction:
Use idiomatic Kotlin DSL in dependency alignment snippet
## Code After:
plugins {
id("java-library")
id("myproject.publishing-conventions")
}
publishing {
publications {
create<MavenPublication>("maven") {
from(components["java"])
}
}
}
|
# ... existing code ...
publishing {
publications {
create<MavenPublication>("maven") {
from(components["java"])
}
}
# ... rest of the code ...
|
5dc4a262771e616458feeaa9bf4ca8568736761a
|
docs/contributors/generate.py
|
docs/contributors/generate.py
|
import sys
from jinja2 import Template
from fetch import HERE, load_awesome_people
TPL_FILE = HERE / 'snippet.jinja2'
HTTPIE_TEAM = {
'BoboTiG',
'claudiatd',
'jakubroztocil',
'jkbr',
}
def generate_snippets(release: str) -> str:
people = load_awesome_people()
contributors = {
name: details
for name, details in people.items()
if details['github'] not in HTTPIE_TEAM
and (release in details['committed'] or release in details['reported'])
}
template = Template(source=TPL_FILE.read_text(encoding='utf-8'))
output = template.render(contributors=contributors, release=release)
print(output)
return 0
if __name__ == '__main__':
ret = 1
try:
ret = generate_snippets(sys.argv[1])
except (IndexError, TypeError):
ret = 2
print(f'''
Generate snippets for contributors to a release.
Usage:
python {sys.argv[0]} {sys.argv[0]} <RELEASE>
''')
sys.exit(ret)
|
import sys
from jinja2 import Template
from fetch import HERE, load_awesome_people
TPL_FILE = HERE / 'snippet.jinja2'
HTTPIE_TEAM = {
'claudiatd',
'jakubroztocil',
'jkbr',
}
def generate_snippets(release: str) -> str:
people = load_awesome_people()
contributors = {
name: details
for name, details in people.items()
if details['github'] not in HTTPIE_TEAM
and (release in details['committed'] or release in details['reported'])
}
template = Template(source=TPL_FILE.read_text(encoding='utf-8'))
output = template.render(contributors=contributors, release=release)
print(output)
return 0
if __name__ == '__main__':
ret = 1
try:
ret = generate_snippets(sys.argv[1])
except (IndexError, TypeError):
ret = 2
print(f'''
Generate snippets for contributors to a release.
Usage:
python {sys.argv[0]} {sys.argv[0]} <RELEASE>
''')
sys.exit(ret)
|
Remove myself from the HTTPie team
|
Remove myself from the HTTPie team
|
Python
|
bsd-3-clause
|
jakubroztocil/httpie,PKRoma/httpie,jakubroztocil/httpie,jakubroztocil/httpie,PKRoma/httpie
|
python
|
## Code Before:
import sys
from jinja2 import Template
from fetch import HERE, load_awesome_people
TPL_FILE = HERE / 'snippet.jinja2'
HTTPIE_TEAM = {
'BoboTiG',
'claudiatd',
'jakubroztocil',
'jkbr',
}
def generate_snippets(release: str) -> str:
people = load_awesome_people()
contributors = {
name: details
for name, details in people.items()
if details['github'] not in HTTPIE_TEAM
and (release in details['committed'] or release in details['reported'])
}
template = Template(source=TPL_FILE.read_text(encoding='utf-8'))
output = template.render(contributors=contributors, release=release)
print(output)
return 0
if __name__ == '__main__':
ret = 1
try:
ret = generate_snippets(sys.argv[1])
except (IndexError, TypeError):
ret = 2
print(f'''
Generate snippets for contributors to a release.
Usage:
python {sys.argv[0]} {sys.argv[0]} <RELEASE>
''')
sys.exit(ret)
## Instruction:
Remove myself from the HTTPie team
## Code After:
import sys
from jinja2 import Template
from fetch import HERE, load_awesome_people
TPL_FILE = HERE / 'snippet.jinja2'
HTTPIE_TEAM = {
'claudiatd',
'jakubroztocil',
'jkbr',
}
def generate_snippets(release: str) -> str:
people = load_awesome_people()
contributors = {
name: details
for name, details in people.items()
if details['github'] not in HTTPIE_TEAM
and (release in details['committed'] or release in details['reported'])
}
template = Template(source=TPL_FILE.read_text(encoding='utf-8'))
output = template.render(contributors=contributors, release=release)
print(output)
return 0
if __name__ == '__main__':
ret = 1
try:
ret = generate_snippets(sys.argv[1])
except (IndexError, TypeError):
ret = 2
print(f'''
Generate snippets for contributors to a release.
Usage:
python {sys.argv[0]} {sys.argv[0]} <RELEASE>
''')
sys.exit(ret)
|
# ... existing code ...
TPL_FILE = HERE / 'snippet.jinja2'
HTTPIE_TEAM = {
'claudiatd',
'jakubroztocil',
'jkbr',
# ... rest of the code ...
|
96512dd8484353bacd134a0bf9db774a166d530c
|
mitmproxy/platform/osx.py
|
mitmproxy/platform/osx.py
|
import subprocess
import pf
"""
Doing this the "right" way by using DIOCNATLOOK on the pf device turns out
to be a pain. Apple has made a number of modifications to the data
structures returned, and compiling userspace tools to test and work with
this turns out to be a pain in the ass. Parsing pfctl output is short,
simple, and works.
Note: Also Tested with FreeBSD 10 pkgng Python 2.7.x.
Should work almost exactly as on Mac OS X and except with some changes to
the output processing of pfctl (see pf.py).
"""
class Resolver(object):
STATECMD = ("sudo", "-n", "/sbin/pfctl", "-s", "state")
def original_addr(self, csock):
peer = csock.getpeername()
try:
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
if "sudo: a password is required" in e.output:
insufficient_priv = True
else:
raise RuntimeError("Error getting pfctl state: " + repr(e))
else:
insufficient_priv = "sudo: a password is required" in stxt
if insufficient_priv:
raise RuntimeError(
"Insufficient privileges to access pfctl. "
"See http://mitmproxy.org/doc/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
|
import subprocess
import pf
"""
Doing this the "right" way by using DIOCNATLOOK on the pf device turns out
to be a pain. Apple has made a number of modifications to the data
structures returned, and compiling userspace tools to test and work with
this turns out to be a pain in the ass. Parsing pfctl output is short,
simple, and works.
Note: Also Tested with FreeBSD 10 pkgng Python 2.7.x.
Should work almost exactly as on Mac OS X and except with some changes to
the output processing of pfctl (see pf.py).
"""
class Resolver(object):
STATECMD = ("sudo", "-n", "/sbin/pfctl", "-s", "state")
def original_addr(self, csock):
peer = csock.getpeername()
try:
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
if "sudo: a password is required" in e.output:
insufficient_priv = True
else:
raise RuntimeError("Error getting pfctl state: " + repr(e))
else:
insufficient_priv = "sudo: a password is required" in stxt
if insufficient_priv:
raise RuntimeError(
"Insufficient privileges to access pfctl. "
"See http://docs.mitmproxy.org/en/latest/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
|
Include correct documentation URL in error message
|
Include correct documentation URL in error message
|
Python
|
mit
|
mhils/mitmproxy,laurmurclar/mitmproxy,vhaupert/mitmproxy,dufferzafar/mitmproxy,mitmproxy/mitmproxy,laurmurclar/mitmproxy,cortesi/mitmproxy,vhaupert/mitmproxy,StevenVanAcker/mitmproxy,jvillacorta/mitmproxy,mitmproxy/mitmproxy,zlorb/mitmproxy,StevenVanAcker/mitmproxy,dwfreed/mitmproxy,Kriechi/mitmproxy,dwfreed/mitmproxy,xaxa89/mitmproxy,gzzhanghao/mitmproxy,cortesi/mitmproxy,ddworken/mitmproxy,jvillacorta/mitmproxy,dufferzafar/mitmproxy,ddworken/mitmproxy,ujjwal96/mitmproxy,MatthewShao/mitmproxy,tdickers/mitmproxy,ujjwal96/mitmproxy,MatthewShao/mitmproxy,Kriechi/mitmproxy,mhils/mitmproxy,cortesi/mitmproxy,gzzhanghao/mitmproxy,tdickers/mitmproxy,gzzhanghao/mitmproxy,mhils/mitmproxy,dufferzafar/mitmproxy,jvillacorta/mitmproxy,vhaupert/mitmproxy,ddworken/mitmproxy,StevenVanAcker/mitmproxy,tdickers/mitmproxy,laurmurclar/mitmproxy,ujjwal96/mitmproxy,xaxa89/mitmproxy,mosajjal/mitmproxy,MatthewShao/mitmproxy,mitmproxy/mitmproxy,ujjwal96/mitmproxy,ddworken/mitmproxy,Kriechi/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,xaxa89/mitmproxy,mosajjal/mitmproxy,MatthewShao/mitmproxy,mosajjal/mitmproxy,jvillacorta/mitmproxy,dufferzafar/mitmproxy,StevenVanAcker/mitmproxy,mitmproxy/mitmproxy,cortesi/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,dwfreed/mitmproxy,dwfreed/mitmproxy,zlorb/mitmproxy,xaxa89/mitmproxy,mitmproxy/mitmproxy,tdickers/mitmproxy,Kriechi/mitmproxy,zlorb/mitmproxy,gzzhanghao/mitmproxy,zlorb/mitmproxy,mhils/mitmproxy
|
python
|
## Code Before:
import subprocess
import pf
"""
Doing this the "right" way by using DIOCNATLOOK on the pf device turns out
to be a pain. Apple has made a number of modifications to the data
structures returned, and compiling userspace tools to test and work with
this turns out to be a pain in the ass. Parsing pfctl output is short,
simple, and works.
Note: Also Tested with FreeBSD 10 pkgng Python 2.7.x.
Should work almost exactly as on Mac OS X and except with some changes to
the output processing of pfctl (see pf.py).
"""
class Resolver(object):
STATECMD = ("sudo", "-n", "/sbin/pfctl", "-s", "state")
def original_addr(self, csock):
peer = csock.getpeername()
try:
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
if "sudo: a password is required" in e.output:
insufficient_priv = True
else:
raise RuntimeError("Error getting pfctl state: " + repr(e))
else:
insufficient_priv = "sudo: a password is required" in stxt
if insufficient_priv:
raise RuntimeError(
"Insufficient privileges to access pfctl. "
"See http://mitmproxy.org/doc/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
## Instruction:
Include correct documentation URL in error message
## Code After:
import subprocess
import pf
"""
Doing this the "right" way by using DIOCNATLOOK on the pf device turns out
to be a pain. Apple has made a number of modifications to the data
structures returned, and compiling userspace tools to test and work with
this turns out to be a pain in the ass. Parsing pfctl output is short,
simple, and works.
Note: Also Tested with FreeBSD 10 pkgng Python 2.7.x.
Should work almost exactly as on Mac OS X and except with some changes to
the output processing of pfctl (see pf.py).
"""
class Resolver(object):
STATECMD = ("sudo", "-n", "/sbin/pfctl", "-s", "state")
def original_addr(self, csock):
peer = csock.getpeername()
try:
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
if "sudo: a password is required" in e.output:
insufficient_priv = True
else:
raise RuntimeError("Error getting pfctl state: " + repr(e))
else:
insufficient_priv = "sudo: a password is required" in stxt
if insufficient_priv:
raise RuntimeError(
"Insufficient privileges to access pfctl. "
"See http://docs.mitmproxy.org/en/latest/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
|
// ... existing code ...
if insufficient_priv:
raise RuntimeError(
"Insufficient privileges to access pfctl. "
"See http://docs.mitmproxy.org/en/latest/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
// ... rest of the code ...
|
d041c9244a36db5aef29412824e9346aceb53c9f
|
editorconfig/__init__.py
|
editorconfig/__init__.py
|
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
|
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
Add get_properties class for simpler plugin usage
|
Add get_properties class for simpler plugin usage
|
Python
|
bsd-2-clause
|
VictorBjelkholm/editorconfig-vim,VictorBjelkholm/editorconfig-vim,pocke/editorconfig-vim,dublebuble/editorconfig-gedit,benjifisher/editorconfig-vim,benjifisher/editorconfig-vim,dublebuble/editorconfig-gedit,johnfraney/editorconfig-vim,dublebuble/editorconfig-gedit,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,johnfraney/editorconfig-vim,pocke/editorconfig-vim,pocke/editorconfig-vim,benjifisher/editorconfig-vim
|
python
|
## Code Before:
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['handler', 'exceptions', 'main']
__version__ = join_version(VERSION)
## Instruction:
Add get_properties class for simpler plugin usage
## Code After:
from versiontools import join_version
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
|
# ... existing code ...
VERSION = (0, 9, 0, "alpha")
__all__ = ['get_properties', 'EditorConfigError', 'handler', 'exceptions']
__version__ = join_version(VERSION)
def get_properties(filename):
handler = EditorConfigHandler(filename)
return handler.get_configurations()
from handler import EditorConfigHandler
from exceptions import *
# ... rest of the code ...
|
1d52996a88eb5aed643fe61ee959bd88373401b3
|
filebutler_upload/utils.py
|
filebutler_upload/utils.py
|
from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
|
from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
|
Throw a linebreak in there upon completion
|
Throw a linebreak in there upon completion
|
Python
|
bsd-3-clause
|
jhaals/filebutler-upload
|
python
|
## Code Before:
from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
if datetime.now() - self.time_updated > timedelta(seconds=0.5):
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
## Instruction:
Throw a linebreak in there upon completion
## Code After:
from datetime import datetime, timedelta
import sys
class ProgressBar(object):
def __init__(self, filename, fmt):
self.filename = filename
self.fmt = fmt
self.progress = 0
self.total = 0
self.time_started = datetime.now()
self.time_updated = self.time_started
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
speed=self.get_mbps()
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
def get_percent(self):
return self.progress / float(self.total)
def get_mbps(self):
time_delta = datetime.now() - self.time_started
if not time_delta.seconds:
return 0
return self.progress * 8 / float(time_delta.seconds) / 1000 / 1000
|
# ... existing code ...
def __call__(self, current, total):
self.progress = current
self.total = total
final_update = current == total
if datetime.now() - self.time_updated > timedelta(seconds=0.5) or final_update:
output = self.fmt.format(
filename=self.filename,
percent=self.get_percent(),
# ... modified code ...
)
sys.stdout.write('\r' + output)
if final_update:
sys.stdout.write('\n')
sys.stdout.flush()
self.time_updated = datetime.now()
# ... rest of the code ...
|
45c7e910f13a43427359801782eef7ce537d6f5f
|
delayed_assert/__init__.py
|
delayed_assert/__init__.py
|
from delayed_assert.delayed_assert import expect, assert_expectations
|
import sys
if sys.version_info > (3, 0): # Python 3 and above
from delayed_assert.delayed_assert import expect, assert_expectations
else: # for Python 2
from delayed_assert import expect, assert_expectations
|
Support for python 2 and 3
|
Support for python 2 and 3
|
Python
|
unlicense
|
pr4bh4sh/python-delayed-assert
|
python
|
## Code Before:
from delayed_assert.delayed_assert import expect, assert_expectations
## Instruction:
Support for python 2 and 3
## Code After:
import sys
if sys.version_info > (3, 0): # Python 3 and above
from delayed_assert.delayed_assert import expect, assert_expectations
else: # for Python 2
from delayed_assert import expect, assert_expectations
|
// ... existing code ...
import sys
if sys.version_info > (3, 0): # Python 3 and above
from delayed_assert.delayed_assert import expect, assert_expectations
else: # for Python 2
from delayed_assert import expect, assert_expectations
// ... rest of the code ...
|
42c6d252084fa9336cf5c5d1766de29bc31bf082
|
dbaas/workflow/steps/util/resize/start_database.py
|
dbaas/workflow/steps/util/resize/start_database.py
|
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
Add sleep on start database
|
Add sleep on start database
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
python
|
## Code Before:
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
## Instruction:
Add sleep on start database
## Code After:
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
class StartDatabase(BaseStep):
def __unicode__(self):
return "Starting database..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
databaseinfra = workflow_dict['databaseinfra']
host = workflow_dict['host']
return_code, output = use_database_initialization_script(databaseinfra=databaseinfra,
host=host,
option='stop')
if return_code != 0:
raise Exception(str(output))
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0022)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
|
// ... existing code ...
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0022
from workflow.steps.util.restore_snapshot import use_database_initialization_script
from time import sleep
LOG = logging.getLogger(__name__)
// ... modified code ...
instance = workflow_dict['instance']
if databaseinfra.plan.is_ha:
sleep(60)
driver = databaseinfra.get_driver()
driver.start_slave(instance=instance)
// ... rest of the code ...
|
959d20df781edb9f283f5317f50e8000f83e7ab6
|
tests/rules/test_no_such_file.py
|
tests/rules/test_no_such_file.py
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
Add `test_not_match` to `no_such_file` tests
|
Add `test_not_match` to `no_such_file` tests
|
Python
|
mit
|
manashmndl/thefuck,levythu/thefuck,qingying5810/thefuck,mlk/thefuck,vanita5/thefuck,artiya4u/thefuck,nvbn/thefuck,ostree/thefuck,lawrencebenson/thefuck,sekaiamber/thefuck,manashmndl/thefuck,thinkerchan/thefuck,princeofdarkness76/thefuck,subajat1/thefuck,PLNech/thefuck,lawrencebenson/thefuck,roth1002/thefuck,bigplus/thefuck,princeofdarkness76/thefuck,beni55/thefuck,zhangzhishan/thefuck,redreamality/thefuck,ostree/thefuck,NguyenHoaiNam/thefuck,hxddh/thefuck,BertieJim/thefuck,thesoulkiller/thefuck,bigplus/thefuck,mlk/thefuck,barneyElDinosaurio/thefuck,bugaevc/thefuck,Clpsplug/thefuck,mcarton/thefuck,SimenB/thefuck,MJerty/thefuck,levythu/thefuck,BertieJim/thefuck,vanita5/thefuck,Aeron/thefuck,thesoulkiller/thefuck,subajat1/thefuck,PLNech/thefuck,SimenB/thefuck,gogobebe2/thefuck,AntonChankin/thefuck,LawrenceHan/thefuck,mcarton/thefuck,AntonChankin/thefuck,mbbill/thefuck,Clpsplug/thefuck,LawrenceHan/thefuck,scorphus/thefuck,beni55/thefuck,MJerty/thefuck,scorphus/thefuck,thinkerchan/thefuck,redreamality/thefuck,nvbn/thefuck,roth1002/thefuck,hxddh/thefuck,barneyElDinosaurio/thefuck
|
python
|
## Code Before:
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
## Instruction:
Add `test_not_match` to `no_such_file` tests
## Code After:
import pytest
from thefuck.rules.no_such_file import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"),
Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"),
])
def test_match(command):
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
])
def test_get_new_command(command, new_command):
assert get_new_command(command, None) == new_command
|
# ... existing code ...
assert match(command, None)
@pytest.mark.parametrize('command', [
Command(script='mv foo bar/', stderr=""),
Command(script='mv foo bar/foo', stderr="mv: permission denied"),
])
def test_not_match(command):
assert not match(command, None)
@pytest.mark.parametrize('command, new_command', [
(Command(script='mv foo bar/foo', stderr="mv: cannot move 'foo' to 'bar/foo': No such file or directory"), 'mkdir -p bar && mv foo bar/foo'),
(Command(script='mv foo bar/', stderr="mv: cannot move 'foo' to 'bar/': No such file or directory"), 'mkdir -p bar && mv foo bar/'),
# ... rest of the code ...
|
e4e8c4e3b98e122e5cf4c9c349c4fb2abfe00ab1
|
api/bioguide/models.py
|
api/bioguide/models.py
|
from django.db import models
class Legislator(models.Model):
"""Model representing a legislator in a session of congress.
"""
bioguide_id = models.CharField(max_length=7, db_index=True)
prefix = models.CharField(max_length=16)
first = models.CharField(max_length=64)
last = models.CharField(max_length=64)
suffix = models.CharField(max_length=16)
birth_death = models.CharField(max_length=16)
position = models.CharField(max_length=24)
party = models.CharField(max_length=32)
state = models.CharField(max_length=2)
congress = models.CharField(max_length=3)
class Meta:
unique_together = (('bioguide_id', 'congress', ))
def __unicode__(self):
return ' '.join([self.prefix, self.first, self.last, self.suffix, ])
|
from django.db import models
class Legislator(models.Model):
"""Model representing a legislator in a session of congress.
"""
bioguide_id = models.CharField(max_length=7, db_index=True)
prefix = models.CharField(max_length=16)
first = models.CharField(max_length=64)
last = models.CharField(max_length=64)
suffix = models.CharField(max_length=16)
birth_death = models.CharField(max_length=16)
position = models.CharField(max_length=24)
party = models.CharField(max_length=32)
state = models.CharField(max_length=2)
congress = models.CharField(max_length=3)
class Meta:
unique_together = (('bioguide_id', 'congress', 'position', ))
def __unicode__(self):
return ' '.join([self.prefix, self.first, self.last, self.suffix, ])
|
Add 'position' to unique_together constraint, to account for members who serve in both houses during a Congress (h/t @derekwillis)
|
Add 'position' to unique_together constraint, to account for members who serve in both houses during a Congress (h/t @derekwillis)
|
Python
|
bsd-3-clause
|
propublica/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words
|
python
|
## Code Before:
from django.db import models
class Legislator(models.Model):
"""Model representing a legislator in a session of congress.
"""
bioguide_id = models.CharField(max_length=7, db_index=True)
prefix = models.CharField(max_length=16)
first = models.CharField(max_length=64)
last = models.CharField(max_length=64)
suffix = models.CharField(max_length=16)
birth_death = models.CharField(max_length=16)
position = models.CharField(max_length=24)
party = models.CharField(max_length=32)
state = models.CharField(max_length=2)
congress = models.CharField(max_length=3)
class Meta:
unique_together = (('bioguide_id', 'congress', ))
def __unicode__(self):
return ' '.join([self.prefix, self.first, self.last, self.suffix, ])
## Instruction:
Add 'position' to unique_together constraint, to account for members who serve in both houses during a Congress (h/t @derekwillis)
## Code After:
from django.db import models
class Legislator(models.Model):
"""Model representing a legislator in a session of congress.
"""
bioguide_id = models.CharField(max_length=7, db_index=True)
prefix = models.CharField(max_length=16)
first = models.CharField(max_length=64)
last = models.CharField(max_length=64)
suffix = models.CharField(max_length=16)
birth_death = models.CharField(max_length=16)
position = models.CharField(max_length=24)
party = models.CharField(max_length=32)
state = models.CharField(max_length=2)
congress = models.CharField(max_length=3)
class Meta:
unique_together = (('bioguide_id', 'congress', 'position', ))
def __unicode__(self):
return ' '.join([self.prefix, self.first, self.last, self.suffix, ])
|
...
congress = models.CharField(max_length=3)
class Meta:
unique_together = (('bioguide_id', 'congress', 'position', ))
def __unicode__(self):
return ' '.join([self.prefix, self.first, self.last, self.suffix, ])
...
|
cb577270522966fe7788bef3f0e395091ee4ba09
|
src/test/java/com/fasterxml/jackson/databind/deser/TestNoClassDefFoundDeserializer.java
|
src/test/java/com/fasterxml/jackson/databind/deser/TestNoClassDefFoundDeserializer.java
|
package com.fasterxml.jackson.databind.deser;
import javax.measure.Measure;
import java.util.List;
import com.fasterxml.jackson.databind.BaseMapTest;
import com.fasterxml.jackson.databind.ObjectMapper;
public class TestNoClassDefFoundDeserializer extends BaseMapTest {
public static class Parent {
public List<Child> child;
}
public static class Child {
public Measure measure;
}
public void testClassIsMissing() throws ClassNotFoundException
{
boolean missing = false;
try {
Class.forName("javax.measure.Measure");
} catch (ClassNotFoundException ex) {
missing = true;
}
assertTrue("javax.measure.Measure is not in classpath", missing);
}
public void testDeserialize() throws Exception
{
ObjectMapper m = new ObjectMapper();
Parent result = m.readValue(" { } ", Parent.class);
assertNotNull(result);
}
public void testUseMissingClass() throws Exception
{
boolean missing = false;
try {
ObjectMapper m = new ObjectMapper();
m.readValue(" { \"child\" : [{}] } ", Parent.class);
} catch (NoClassDefFoundError ex) {
missing = true;
}
assertTrue("cannot instantiate a missing class", missing);
}
}
|
package com.fasterxml.jackson.databind.deser;
import javax.measure.Measure;
import java.util.List;
import com.fasterxml.jackson.databind.BaseMapTest;
import com.fasterxml.jackson.databind.ObjectMapper;
public class TestNoClassDefFoundDeserializer extends BaseMapTest {
public static class Parent {
public List<Child> child;
}
public static class Child {
public Measure measure;
}
public void testClassIsMissing()
{
boolean missing = false;
try {
Class.forName("javax.measure.Measure");
} catch (ClassNotFoundException ex) {
missing = true;
}
assertTrue("javax.measure.Measure is not in classpath", missing);
}
public void testDeserialize() throws Exception
{
ObjectMapper m = new ObjectMapper();
Parent result = m.readValue(" { } ", Parent.class);
assertNotNull(result);
}
public void testUseMissingClass() throws Exception
{
boolean missing = false;
try {
ObjectMapper m = new ObjectMapper();
m.readValue(" { \"child\" : [{}] } ", Parent.class);
} catch (NoClassDefFoundError ex) {
missing = true;
}
assertTrue("cannot instantiate a missing class", missing);
}
}
|
Remove unneeded exception signature from unit test.
|
Remove unneeded exception signature from unit test.
|
Java
|
apache-2.0
|
FasterXML/jackson-databind,FasterXML/jackson-databind
|
java
|
## Code Before:
package com.fasterxml.jackson.databind.deser;
import javax.measure.Measure;
import java.util.List;
import com.fasterxml.jackson.databind.BaseMapTest;
import com.fasterxml.jackson.databind.ObjectMapper;
public class TestNoClassDefFoundDeserializer extends BaseMapTest {
public static class Parent {
public List<Child> child;
}
public static class Child {
public Measure measure;
}
public void testClassIsMissing() throws ClassNotFoundException
{
boolean missing = false;
try {
Class.forName("javax.measure.Measure");
} catch (ClassNotFoundException ex) {
missing = true;
}
assertTrue("javax.measure.Measure is not in classpath", missing);
}
public void testDeserialize() throws Exception
{
ObjectMapper m = new ObjectMapper();
Parent result = m.readValue(" { } ", Parent.class);
assertNotNull(result);
}
public void testUseMissingClass() throws Exception
{
boolean missing = false;
try {
ObjectMapper m = new ObjectMapper();
m.readValue(" { \"child\" : [{}] } ", Parent.class);
} catch (NoClassDefFoundError ex) {
missing = true;
}
assertTrue("cannot instantiate a missing class", missing);
}
}
## Instruction:
Remove unneeded exception signature from unit test.
## Code After:
package com.fasterxml.jackson.databind.deser;
import javax.measure.Measure;
import java.util.List;
import com.fasterxml.jackson.databind.BaseMapTest;
import com.fasterxml.jackson.databind.ObjectMapper;
public class TestNoClassDefFoundDeserializer extends BaseMapTest {
public static class Parent {
public List<Child> child;
}
public static class Child {
public Measure measure;
}
public void testClassIsMissing()
{
boolean missing = false;
try {
Class.forName("javax.measure.Measure");
} catch (ClassNotFoundException ex) {
missing = true;
}
assertTrue("javax.measure.Measure is not in classpath", missing);
}
public void testDeserialize() throws Exception
{
ObjectMapper m = new ObjectMapper();
Parent result = m.readValue(" { } ", Parent.class);
assertNotNull(result);
}
public void testUseMissingClass() throws Exception
{
boolean missing = false;
try {
ObjectMapper m = new ObjectMapper();
m.readValue(" { \"child\" : [{}] } ", Parent.class);
} catch (NoClassDefFoundError ex) {
missing = true;
}
assertTrue("cannot instantiate a missing class", missing);
}
}
|
// ... existing code ...
public Measure measure;
}
public void testClassIsMissing()
{
boolean missing = false;
try {
// ... rest of the code ...
|
c4cafa9ac3e737d5bab17548b7248258f72a2172
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='Segue',
version='1.0.0dev',
description='Maya and Houdini geometry transfer helper.',
packages=[
'segue',
],
package_dir={
'': 'source'
},
author='Martin Pengelly-Phillips',
author_email='[email protected]',
license='Apache License (2.0)',
long_description=open('README.rst').read(),
url='https://bitbucket.org/4degrees/segue',
keywords='maya,houdini,transfer,cache'
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='Segue',
version='1.0.0dev',
description='Maya and Houdini geometry transfer helper.',
packages=[
'segue',
],
package_dir={
'': 'source'
},
author='Martin Pengelly-Phillips',
author_email='[email protected]',
license='Apache License (2.0)',
long_description=open('README.rst').read(),
url='https://gitlab.com/4degrees/segue',
keywords='maya,houdini,transfer,cache'
)
|
Update links from Github to Gitlab following project move.
|
Update links from Github to Gitlab following project move.
|
Python
|
apache-2.0
|
4degrees/segue
|
python
|
## Code Before:
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='Segue',
version='1.0.0dev',
description='Maya and Houdini geometry transfer helper.',
packages=[
'segue',
],
package_dir={
'': 'source'
},
author='Martin Pengelly-Phillips',
author_email='[email protected]',
license='Apache License (2.0)',
long_description=open('README.rst').read(),
url='https://bitbucket.org/4degrees/segue',
keywords='maya,houdini,transfer,cache'
)
## Instruction:
Update links from Github to Gitlab following project move.
## Code After:
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='Segue',
version='1.0.0dev',
description='Maya and Houdini geometry transfer helper.',
packages=[
'segue',
],
package_dir={
'': 'source'
},
author='Martin Pengelly-Phillips',
author_email='[email protected]',
license='Apache License (2.0)',
long_description=open('README.rst').read(),
url='https://gitlab.com/4degrees/segue',
keywords='maya,houdini,transfer,cache'
)
|
// ... existing code ...
author_email='[email protected]',
license='Apache License (2.0)',
long_description=open('README.rst').read(),
url='https://gitlab.com/4degrees/segue',
keywords='maya,houdini,transfer,cache'
)
// ... rest of the code ...
|
223ea8f705612360d86044aa8749d404b8abb3eb
|
src/test/java/com/warrenstrange/googleauth/GoogleAuthTest.java
|
src/test/java/com/warrenstrange/googleauth/GoogleAuthTest.java
|
package com.warrenstrange.googleauth;
import org.junit.Test;
/*
* Not really a unit test- but it shows usage
*/
public class GoogleAuthTest {
@Test
public void genSecretTest() {
GoogleAuthenticator gauth = new GoogleAuthenticator();
final GoogleAuthenticatorKey key = gauth.generateSecretKey();
final String secret = key.getKey();
String url = GoogleAuthenticatorKey.getQRBarcodeURL("testuser", "testhost", secret);
System.out.println("Please register " + url);
System.out.println("Secret key is " + secret);
}
// Change this to the saved secret from the running the above test.
static String savedSecret = "VV5OVNP4S42DQSS3";
@Test
public void authTest() {
// enter the code shown on device. Edit this and run it fast before the code expires!
int code = 863311;
GoogleAuthenticator ga = new GoogleAuthenticator();
ga.setWindowSize(5); //should give 5 * 30 seconds of grace...
boolean r = ga.authorize(savedSecret, code);
System.out.println("Check code = " + r);
}
}
|
package com.warrenstrange.googleauth;
import org.junit.Test;
/**
* Not really a unit test, but it shows the basic usage of this package.
* To properly test the authenticator, manual intervention and multiple steps
* are required:
* <ol>
* <li>Run the test in order to generate the required information for a
* Google Authenticator application to be configured.</li>
* <li>Set the <code>savedSecret</code> field with the value generated by the
* <code>GoogleAuthTest#genSecretTest</code> method.</li>
* <li>Generate the current code with the Google Authenticator application and
* set the <code>currentCode</code> accordingly.</li>
* <li>Check that the <code>#authTest</code> method correctly validates the
* data when invoking the <code>GoogleAuthenticator#authorize</code> method.
* </li>
* </ol>
*/
public class GoogleAuthTest {
// Change this to the saved secret from the running the above test.
private static String savedSecret = "VV5OVNP4S42DQSS3";
private static int currentCode = 863311;
@Test
public void genSecretTest() {
GoogleAuthenticator googleAuthenticator = new GoogleAuthenticator();
final GoogleAuthenticatorKey key =
googleAuthenticator.generateSecretKey();
final String secret = key.getKey();
String url = GoogleAuthenticatorKey.getQRBarcodeURL(
"testuser", "testhost", secret);
System.out.println("Please register " + url);
System.out.println("Secret key is " + secret);
}
@Test
public void authTest() {
GoogleAuthenticator ga = new GoogleAuthenticator();
ga.setWindowSize(5); //should give 5 * 30 seconds of grace...
boolean r = ga.authorize(savedSecret, currentCode);
System.out.println("Check currentCode = " + r);
}
}
|
Update the test class and properly document it.
|
Update the test class and properly document it.
|
Java
|
bsd-3-clause
|
fengshao0907/GoogleAuth,fengshao0907/GoogleAuth,wattale/GoogleAuth,ymauray/GoogleAuth,wattale/GoogleAuth,wstrange/GoogleAuth,ymauray/GoogleAuth,dmullins78/GoogleAuth,wstrange/GoogleAuth,dmullins78/GoogleAuth
|
java
|
## Code Before:
package com.warrenstrange.googleauth;
import org.junit.Test;
/*
* Not really a unit test- but it shows usage
*/
public class GoogleAuthTest {
@Test
public void genSecretTest() {
GoogleAuthenticator gauth = new GoogleAuthenticator();
final GoogleAuthenticatorKey key = gauth.generateSecretKey();
final String secret = key.getKey();
String url = GoogleAuthenticatorKey.getQRBarcodeURL("testuser", "testhost", secret);
System.out.println("Please register " + url);
System.out.println("Secret key is " + secret);
}
// Change this to the saved secret from the running the above test.
static String savedSecret = "VV5OVNP4S42DQSS3";
@Test
public void authTest() {
// enter the code shown on device. Edit this and run it fast before the code expires!
int code = 863311;
GoogleAuthenticator ga = new GoogleAuthenticator();
ga.setWindowSize(5); //should give 5 * 30 seconds of grace...
boolean r = ga.authorize(savedSecret, code);
System.out.println("Check code = " + r);
}
}
## Instruction:
Update the test class and properly document it.
## Code After:
package com.warrenstrange.googleauth;
import org.junit.Test;
/**
* Not really a unit test, but it shows the basic usage of this package.
* To properly test the authenticator, manual intervention and multiple steps
* are required:
* <ol>
* <li>Run the test in order to generate the required information for a
* Google Authenticator application to be configured.</li>
* <li>Set the <code>savedSecret</code> field with the value generated by the
* <code>GoogleAuthTest#genSecretTest</code> method.</li>
* <li>Generate the current code with the Google Authenticator application and
* set the <code>currentCode</code> accordingly.</li>
* <li>Check that the <code>#authTest</code> method correctly validates the
* data when invoking the <code>GoogleAuthenticator#authorize</code> method.
* </li>
* </ol>
*/
public class GoogleAuthTest {
// Change this to the saved secret from the running the above test.
private static String savedSecret = "VV5OVNP4S42DQSS3";
private static int currentCode = 863311;
@Test
public void genSecretTest() {
GoogleAuthenticator googleAuthenticator = new GoogleAuthenticator();
final GoogleAuthenticatorKey key =
googleAuthenticator.generateSecretKey();
final String secret = key.getKey();
String url = GoogleAuthenticatorKey.getQRBarcodeURL(
"testuser", "testhost", secret);
System.out.println("Please register " + url);
System.out.println("Secret key is " + secret);
}
@Test
public void authTest() {
GoogleAuthenticator ga = new GoogleAuthenticator();
ga.setWindowSize(5); //should give 5 * 30 seconds of grace...
boolean r = ga.authorize(savedSecret, currentCode);
System.out.println("Check currentCode = " + r);
}
}
|
...
import org.junit.Test;
/**
* Not really a unit test, but it shows the basic usage of this package.
* To properly test the authenticator, manual intervention and multiple steps
* are required:
* <ol>
* <li>Run the test in order to generate the required information for a
* Google Authenticator application to be configured.</li>
* <li>Set the <code>savedSecret</code> field with the value generated by the
* <code>GoogleAuthTest#genSecretTest</code> method.</li>
* <li>Generate the current code with the Google Authenticator application and
* set the <code>currentCode</code> accordingly.</li>
* <li>Check that the <code>#authTest</code> method correctly validates the
* data when invoking the <code>GoogleAuthenticator#authorize</code> method.
* </li>
* </ol>
*/
public class GoogleAuthTest {
// Change this to the saved secret from the running the above test.
private static String savedSecret = "VV5OVNP4S42DQSS3";
private static int currentCode = 863311;
@Test
public void genSecretTest() {
GoogleAuthenticator googleAuthenticator = new GoogleAuthenticator();
final GoogleAuthenticatorKey key =
googleAuthenticator.generateSecretKey();
final String secret = key.getKey();
String url = GoogleAuthenticatorKey.getQRBarcodeURL(
"testuser", "testhost", secret);
System.out.println("Please register " + url);
System.out.println("Secret key is " + secret);
}
@Test
public void authTest() {
GoogleAuthenticator ga = new GoogleAuthenticator();
ga.setWindowSize(5); //should give 5 * 30 seconds of grace...
boolean r = ga.authorize(savedSecret, currentCode);
System.out.println("Check currentCode = " + r);
}
...
|
643634e96554b00214ca4f0d45343e61b0df8e5a
|
foxybot/bot_help.py
|
foxybot/bot_help.py
|
import json
import os
class HelpManager(object):
_help_dict = {}
_last_modified = 0
@staticmethod
def get_help(lang, key):
""" Retrieve a given commands help text with given language.
:param lang: ISO 639-1 language code specifying language to try to retrieve
:param key: name of the command
:return: description in `lang` for `key`
"""
if os.path.getmtime('help.json') > HelpManager._last_modified:
HelpManager.load_help()
lang = lang.lower()
key = key.lower()
if lang not in HelpManager._help_dict:
print(f"[ERROR] tried to access `_help_dict[{lang}]`")
lang = 'en'
if key not in HelpManager._help_dict[lang]:
print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`")
return None
return HelpManager._help_dict[lang][key]
@staticmethod
def load_help():
try:
with open('help.json', 'r', encoding='utf-8') as infile:
HelpManager._help_dict = json.load(infile)
HelpManager._last_modified = os.path.getmtime('help.json')
except OSError as ex:
print("[ERROR] Cannot find `help.json`")
print(ex)
print(HelpManager._help_dict)
|
import json
import os
class HelpManager(object):
_help_dict = {}
_last_modified = 0
@staticmethod
def get_help(lang, key):
""" Retrieve a given commands help text with given language.
:param lang: ISO 639-1 language code specifying language to try to retrieve
:param key: name of the command
:return: description in `lang` for `key`
"""
if os.path.getmtime('help.json') > HelpManager._last_modified:
HelpManager.load_help()
lang = lang.lower()
key = key.lower()
if lang not in HelpManager._help_dict:
print(f"[ERROR] tried to access `_help_dict[{lang}]`")
lang = 'en'
if key not in HelpManager._help_dict[lang]:
print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`")
return None
return HelpManager._help_dict[lang][key]
@staticmethod
def load_help():
try:
with open('help.json', 'r', encoding='utf-8') as infile:
HelpManager._help_dict = json.load(infile)
HelpManager._last_modified = os.path.getmtime('help.json')
except OSError as ex:
print("[ERROR] Cannot find `help.json`")
print(ex)
|
Remove unneeded debug cod e
|
Remove unneeded debug cod
e
|
Python
|
bsd-2-clause
|
6180/foxybot
|
python
|
## Code Before:
import json
import os
class HelpManager(object):
_help_dict = {}
_last_modified = 0
@staticmethod
def get_help(lang, key):
""" Retrieve a given commands help text with given language.
:param lang: ISO 639-1 language code specifying language to try to retrieve
:param key: name of the command
:return: description in `lang` for `key`
"""
if os.path.getmtime('help.json') > HelpManager._last_modified:
HelpManager.load_help()
lang = lang.lower()
key = key.lower()
if lang not in HelpManager._help_dict:
print(f"[ERROR] tried to access `_help_dict[{lang}]`")
lang = 'en'
if key not in HelpManager._help_dict[lang]:
print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`")
return None
return HelpManager._help_dict[lang][key]
@staticmethod
def load_help():
try:
with open('help.json', 'r', encoding='utf-8') as infile:
HelpManager._help_dict = json.load(infile)
HelpManager._last_modified = os.path.getmtime('help.json')
except OSError as ex:
print("[ERROR] Cannot find `help.json`")
print(ex)
print(HelpManager._help_dict)
## Instruction:
Remove unneeded debug cod
e
## Code After:
import json
import os
class HelpManager(object):
_help_dict = {}
_last_modified = 0
@staticmethod
def get_help(lang, key):
""" Retrieve a given commands help text with given language.
:param lang: ISO 639-1 language code specifying language to try to retrieve
:param key: name of the command
:return: description in `lang` for `key`
"""
if os.path.getmtime('help.json') > HelpManager._last_modified:
HelpManager.load_help()
lang = lang.lower()
key = key.lower()
if lang not in HelpManager._help_dict:
print(f"[ERROR] tried to access `_help_dict[{lang}]`")
lang = 'en'
if key not in HelpManager._help_dict[lang]:
print(f"[ERROR] tried to access `_help_dict[{lang}][{key}]`")
return None
return HelpManager._help_dict[lang][key]
@staticmethod
def load_help():
try:
with open('help.json', 'r', encoding='utf-8') as infile:
HelpManager._help_dict = json.load(infile)
HelpManager._last_modified = os.path.getmtime('help.json')
except OSError as ex:
print("[ERROR] Cannot find `help.json`")
print(ex)
|
# ... existing code ...
except OSError as ex:
print("[ERROR] Cannot find `help.json`")
print(ex)
# ... rest of the code ...
|
2376316b36f4d6bdbdb82bea519b6296763bb2f2
|
MORK/ORKTaskResult+MORK.h
|
MORK/ORKTaskResult+MORK.h
|
//
// ORKCollectionResult+MORK.h
// MORK
//
// Created by Nolan Carroll on 4/23/15.
// Copyright (c) 2015 Medidata Solutions. All rights reserved.
//
#import "ORKResult.h"
@interface ORKTaskResult (MORK)
@property (readonly) NSArray *mork_fieldDataFromResults;
@end
|
//
// ORKCollectionResult+MORK.h
// MORK
//
// Created by Nolan Carroll on 4/23/15.
// Copyright (c) 2015 Medidata Solutions. All rights reserved.
//
#import "ORKResult.h"
@interface ORKTaskResult (MORK)
- (NSArray *)mork_getFieldDataFromResults;
@end
|
Put method declaration back into category header
|
Put method declaration back into category header
|
C
|
mit
|
mdsol/MORK,mdsol/MORK
|
c
|
## Code Before:
//
// ORKCollectionResult+MORK.h
// MORK
//
// Created by Nolan Carroll on 4/23/15.
// Copyright (c) 2015 Medidata Solutions. All rights reserved.
//
#import "ORKResult.h"
@interface ORKTaskResult (MORK)
@property (readonly) NSArray *mork_fieldDataFromResults;
@end
## Instruction:
Put method declaration back into category header
## Code After:
//
// ORKCollectionResult+MORK.h
// MORK
//
// Created by Nolan Carroll on 4/23/15.
// Copyright (c) 2015 Medidata Solutions. All rights reserved.
//
#import "ORKResult.h"
@interface ORKTaskResult (MORK)
- (NSArray *)mork_getFieldDataFromResults;
@end
|
// ... existing code ...
#import "ORKResult.h"
@interface ORKTaskResult (MORK)
- (NSArray *)mork_getFieldDataFromResults;
@end
// ... rest of the code ...
|
1e50bdf90756a79d45b0c35353d007c5dad2abfc
|
hand_data.py
|
hand_data.py
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
Return hand data as dictionary
|
Return hand data as dictionary
|
Python
|
mit
|
ssaamm/sign-language-tutor,ssaamm/sign-language-translator,ssaamm/sign-language-translator,ssaamm/sign-language-tutor
|
python
|
## Code Before:
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
[(finger1bone1x, finger1bone1y, finger1bone1z), ... finger5bone4z)]
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = []
for joint in finger_bones:
calibrated_finger_bones.append(joint - hand_center)
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
## Instruction:
Return hand data as dictionary
## Code After:
import time
from lib import Leap
from lib.Leap import Bone
'''
gets the current frame from controller
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
fingers = controller.frame().fingers
finger_bones = []
for finger in fingers:
finger_bones.append(finger.bone(Bone.TYPE_METACARPAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_PROXIMAL).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_INTERMEDIATE).next_joint)
finger_bones.append(finger.bone(Bone.TYPE_DISTAL).next_joint)
# possible issue when more than one hand
hands = controller.frame().hands
hand_center = 0
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
if __name__ == "__main__":
controller = Leap.Controller()
while True:
get_hand_position(controller)
time.sleep(1)
|
...
for each finger, stores the topmost end of each bone (4 points)
adjusts bone location relativity by subtracting the center of the palm
returns the adjusted bone locations in the form:
{feat0=some_float, feat1=some_float, ... feat59=some_float}
'''
def get_hand_position(controller):
print "NEW FRAME"
...
for hand in hands:
hand_center = hand.palm_position
calibrated_finger_bones = {}
for i in range(len(finger_bones)):
normalized_joint = (finger_bones[i] - hand_center).to_tuple()
for j in range(3):
calibrated_finger_bones["feat" + str(i*3+j)] = normalized_joint[j]
return calibrated_finger_bones
...
|
f9c7a911411429972929bb4372b370192bd4cf8a
|
altair/examples/interactive_layered_crossfilter.py
|
altair/examples/interactive_layered_crossfilter.py
|
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# blue background with selection
background = base.add_selection(brush)
# yellow highlights on the transformed data
highlight = base.encode(
color=alt.value('goldenrod')
).transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
|
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# gray background with selection
background = base.encode(
color=alt.value('#ddd')
).add_selection(brush)
# blue highlights on the transformed data
highlight = base.transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
|
Update crossfilter to gray/blue scheme
|
Update crossfilter to gray/blue scheme
Same as in https://vega.github.io/editor/#/examples/vega-lite/interactive_layered_crossfilter
|
Python
|
bsd-3-clause
|
altair-viz/altair,jakevdp/altair
|
python
|
## Code Before:
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# blue background with selection
background = base.add_selection(brush)
# yellow highlights on the transformed data
highlight = base.encode(
color=alt.value('goldenrod')
).transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
## Instruction:
Update crossfilter to gray/blue scheme
Same as in https://vega.github.io/editor/#/examples/vega-lite/interactive_layered_crossfilter
## Code After:
# category: interactive charts
import altair as alt
from vega_datasets import data
source = alt.UrlData(
data.flights_2k.url,
format={'parse': {'date': 'date'}}
)
brush = alt.selection(type='interval', encodings=['x'])
# Define the base chart, with the common parts of the
# background and highlights
base = alt.Chart().mark_bar().encode(
x=alt.X(alt.repeat('column'), type='quantitative', bin=alt.Bin(maxbins=20)),
y='count()'
).properties(
width=160,
height=130
)
# gray background with selection
background = base.encode(
color=alt.value('#ddd')
).add_selection(brush)
# blue highlights on the transformed data
highlight = base.transform_filter(brush)
# layer the two charts & repeat
alt.layer(
background,
highlight,
data=source
).transform_calculate(
"time",
"hours(datum.date)"
).repeat(column=["distance", "delay", "time"])
|
...
height=130
)
# gray background with selection
background = base.encode(
color=alt.value('#ddd')
).add_selection(brush)
# blue highlights on the transformed data
highlight = base.transform_filter(brush)
# layer the two charts & repeat
alt.layer(
...
|
3c49598aaaceaa73b7aeb033d4dffd21a14ecf7c
|
src/account.py
|
src/account.py
|
import sqlite3
import os
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
connexion.close()
|
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
|
Create user, read input from stdin and hash password with sha
|
Create user, read input from stdin and hash password with sha
|
Python
|
mit
|
cboin/becon
|
python
|
## Code Before:
import sqlite3
import os
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
connexion.close()
## Instruction:
Create user, read input from stdin and hash password with sha
## Code After:
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
# get home dir
dir_config = expanduser("~") + "/.config/becon"
# check if config dir exists
if not (os.path.exists(dir_config)):
os.makedirs(dir_config)
# connexion to db
database = dir_config + "/storage.sq3"
connexion = sqlite3.connect(database)
cursor = connexion.cursor()
def config_db(t1, t2):
for row in cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='"+ t2 +"' AND name='"+ t2 +"';"):
print(row)
if ("users" not in row):
cursor.execute("CREATE TABLE users(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, nickname VARCHAR(55), fullname VARCHAR(128), password VARCHAR(255), email VARCHAR(128))")
elif ("password" not in row):
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
|
...
import sqlite3
import os
import sys
import hashlib
import getpass
from os.path import expanduser
def connect_db():
...
cursor.execute("CREATE TABLE password(id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE, host VARCHAR(255), user_id INTEGER, password VARCHAR(255), dt datetime DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY(user_id) REFERENCES users(id));")
connexion.commit()
def create_user():
nickname = input("Nickname: ")
fullname = input("Fullname: ")
password = hashlib.sha224(getpass.getpass().encode("utf-8")).hexdigest()
email = input("Email: ")
...
|
d18ae9c3e767e5b98db15731a03aad610aae4510
|
robber/matchers/contain.py
|
robber/matchers/contain.py
|
from robber import expect
from robber.explanation import Explanation
from robber.matchers.base import Base
class Contain(Base):
"""
expect({'key': value}).to.contain('key 1', 'key 2', 'key n')
expect([1, 2, 3]).to.contain(1, 2, 3)
"""
def matches(self):
expected_list = list(self.args)
expected_list.insert(0, self.expected)
if not self.is_negative:
for expected in expected_list:
if expected not in self.actual:
self.expected_arg = expected
return False
return True
else:
# As this is the negative case, we have to flip the return value.
for expected in expected_list:
if expected in self.actual:
self.expected_arg = expected
return True
return False
@property
def explanation(self):
return Explanation(self.actual, self.is_negative, 'contain', self.expected_arg, negative_action='exclude')
expect.register('contain', Contain)
expect.register('exclude', Contain, is_negative=True)
|
from robber import expect
from robber.explanation import Explanation
from robber.matchers.base import Base
class Contain(Base):
"""
expect({'key': value}).to.contain('key 1', 'key 2', 'key n')
expect([1, 2, 3]).to.contain(1, 2, 3)
"""
def matches(self):
expected_list = list(self.args)
expected_list.insert(0, self.expected)
if not self.is_negative:
excluded_args = set(expected_list).difference(self.actual)
try:
self.expected_arg = excluded_args.pop()
except KeyError:
return True
else:
return False
else:
# As this is the negative case, we have to flip the return value.
included_args = set(expected_list).intersection(self.actual)
try:
self.expected_arg = included_args.pop()
except KeyError:
return False
else:
return True
@property
def explanation(self):
return Explanation(self.actual, self.is_negative, 'contain', self.expected_arg, negative_action='exclude')
expect.register('contain', Contain)
expect.register('exclude', Contain, is_negative=True)
|
Use set's intersection and difference for better readability
|
[r] Use set's intersection and difference for better readability
|
Python
|
mit
|
vesln/robber.py
|
python
|
## Code Before:
from robber import expect
from robber.explanation import Explanation
from robber.matchers.base import Base
class Contain(Base):
"""
expect({'key': value}).to.contain('key 1', 'key 2', 'key n')
expect([1, 2, 3]).to.contain(1, 2, 3)
"""
def matches(self):
expected_list = list(self.args)
expected_list.insert(0, self.expected)
if not self.is_negative:
for expected in expected_list:
if expected not in self.actual:
self.expected_arg = expected
return False
return True
else:
# As this is the negative case, we have to flip the return value.
for expected in expected_list:
if expected in self.actual:
self.expected_arg = expected
return True
return False
@property
def explanation(self):
return Explanation(self.actual, self.is_negative, 'contain', self.expected_arg, negative_action='exclude')
expect.register('contain', Contain)
expect.register('exclude', Contain, is_negative=True)
## Instruction:
[r] Use set's intersection and difference for better readability
## Code After:
from robber import expect
from robber.explanation import Explanation
from robber.matchers.base import Base
class Contain(Base):
"""
expect({'key': value}).to.contain('key 1', 'key 2', 'key n')
expect([1, 2, 3]).to.contain(1, 2, 3)
"""
def matches(self):
expected_list = list(self.args)
expected_list.insert(0, self.expected)
if not self.is_negative:
excluded_args = set(expected_list).difference(self.actual)
try:
self.expected_arg = excluded_args.pop()
except KeyError:
return True
else:
return False
else:
# As this is the negative case, we have to flip the return value.
included_args = set(expected_list).intersection(self.actual)
try:
self.expected_arg = included_args.pop()
except KeyError:
return False
else:
return True
@property
def explanation(self):
return Explanation(self.actual, self.is_negative, 'contain', self.expected_arg, negative_action='exclude')
expect.register('contain', Contain)
expect.register('exclude', Contain, is_negative=True)
|
# ... existing code ...
expected_list.insert(0, self.expected)
if not self.is_negative:
excluded_args = set(expected_list).difference(self.actual)
try:
self.expected_arg = excluded_args.pop()
except KeyError:
return True
else:
return False
else:
# As this is the negative case, we have to flip the return value.
included_args = set(expected_list).intersection(self.actual)
try:
self.expected_arg = included_args.pop()
except KeyError:
return False
else:
return True
@property
def explanation(self):
# ... rest of the code ...
|
b13d509f6f8e627656f8af1bd5f262a63f31121f
|
lib/libncurses/termcap.h
|
lib/libncurses/termcap.h
|
extern "C"
{
#endif /* __cplusplus */
#include <sys/cdefs.h>
extern char PC;
extern char *UP;
extern char *BC;
extern short ospeed;
extern int tgetent __P((char *, const char *));
extern int tgetflag __P((const char *));
extern int tgetnum __P((const char *));
extern char *tgetstr __P((const char *, char **));
extern int tputs __P((const char *, int, int (*)(int)));
extern char *tgoto __P((const char *, int, int));
extern char *tparam __P((const char *, char *, int, ...));
#ifdef __cplusplus
}
#endif
#endif /* _TERMCAP_H */
|
__BEGIN_DECLS
extern char PC;
extern char *UP;
extern char *BC;
extern short ospeed;
extern int tgetent __P((char *, const char *));
extern int tgetflag __P((const char *));
extern int tgetnum __P((const char *));
extern char *tgetstr __P((const char *, char **));
extern int tputs __P((const char *, int, int (*)(int)));
extern char *tgoto __P((const char *, int, int));
extern char *tparam __P((const char *, char *, int, ...));
__END_DECLS
#endif /* _TERMCAP_H */
|
Make this file more BSD-like
|
Make this file more BSD-like
|
C
|
bsd-3-clause
|
jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase
|
c
|
## Code Before:
extern "C"
{
#endif /* __cplusplus */
#include <sys/cdefs.h>
extern char PC;
extern char *UP;
extern char *BC;
extern short ospeed;
extern int tgetent __P((char *, const char *));
extern int tgetflag __P((const char *));
extern int tgetnum __P((const char *));
extern char *tgetstr __P((const char *, char **));
extern int tputs __P((const char *, int, int (*)(int)));
extern char *tgoto __P((const char *, int, int));
extern char *tparam __P((const char *, char *, int, ...));
#ifdef __cplusplus
}
#endif
#endif /* _TERMCAP_H */
## Instruction:
Make this file more BSD-like
## Code After:
__BEGIN_DECLS
extern char PC;
extern char *UP;
extern char *BC;
extern short ospeed;
extern int tgetent __P((char *, const char *));
extern int tgetflag __P((const char *));
extern int tgetnum __P((const char *));
extern char *tgetstr __P((const char *, char **));
extern int tputs __P((const char *, int, int (*)(int)));
extern char *tgoto __P((const char *, int, int));
extern char *tparam __P((const char *, char *, int, ...));
__END_DECLS
#endif /* _TERMCAP_H */
|
...
__BEGIN_DECLS
extern char PC;
extern char *UP;
...
extern char *tgoto __P((const char *, int, int));
extern char *tparam __P((const char *, char *, int, ...));
__END_DECLS
#endif /* _TERMCAP_H */
...
|
66e67e53360a9f49ae73c8c8f2de49991525363b
|
txircd/modules/cmode_t.py
|
txircd/modules/cmode_t.py
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,DesertBus/txircd,ElementalAlchemist/txircd
|
python
|
## Code Before:
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
## Instruction:
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
## Code After:
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
...
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
...
|
683495bb65237aa83c5646b96997f80f4ddd98ec
|
src/com/atsebak/raspberrypi/protocol/ssh/SSH.java
|
src/com/atsebak/raspberrypi/protocol/ssh/SSH.java
|
package com.atsebak.raspberrypi.protocol.ssh;
import lombok.Builder;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.transport.verification.PromiscuousVerifier;
import java.io.IOException;
@Builder
public class SSH {
private int timeout;
private int connectionTimeout;
/**
* Build an SSHJ SSH Client
*
* @return
*/
public SSHClient toClient() {
SSHClient sshClient = new SSHClient();
try {
sshClient.addHostKeyVerifier(new PromiscuousVerifier());
sshClient.loadKnownHosts();
sshClient.setConnectTimeout(connectionTimeout);
sshClient.setTimeout(timeout);
} catch (IOException e) {
}
return sshClient;
}
}
|
package com.atsebak.raspberrypi.protocol.ssh;
import lombok.Builder;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.transport.verification.PromiscuousVerifier;
import java.io.IOException;
@Builder
public class SSH {
private int timeout;
private int connectionTimeout;
/**
* Build an SSHJ SSH Client
*
* @return
*/
public SSHClient toClient() {
SSHClient sshClient = new SSHClient();
try {
sshClient.addHostKeyVerifier(new PromiscuousVerifier());
sshClient.setConnectTimeout(connectionTimeout);
sshClient.setTimeout(timeout);
sshClient.loadKnownHosts();
} catch (IOException e) {
}
return sshClient;
}
}
|
Fix issue with test breaking when loading known hosts
|
Fix issue with test breaking when loading known hosts
|
Java
|
apache-2.0
|
asebak/embeddedlinux-jvmdebugger-intellij
|
java
|
## Code Before:
package com.atsebak.raspberrypi.protocol.ssh;
import lombok.Builder;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.transport.verification.PromiscuousVerifier;
import java.io.IOException;
@Builder
public class SSH {
private int timeout;
private int connectionTimeout;
/**
* Build an SSHJ SSH Client
*
* @return
*/
public SSHClient toClient() {
SSHClient sshClient = new SSHClient();
try {
sshClient.addHostKeyVerifier(new PromiscuousVerifier());
sshClient.loadKnownHosts();
sshClient.setConnectTimeout(connectionTimeout);
sshClient.setTimeout(timeout);
} catch (IOException e) {
}
return sshClient;
}
}
## Instruction:
Fix issue with test breaking when loading known hosts
## Code After:
package com.atsebak.raspberrypi.protocol.ssh;
import lombok.Builder;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.transport.verification.PromiscuousVerifier;
import java.io.IOException;
@Builder
public class SSH {
private int timeout;
private int connectionTimeout;
/**
* Build an SSHJ SSH Client
*
* @return
*/
public SSHClient toClient() {
SSHClient sshClient = new SSHClient();
try {
sshClient.addHostKeyVerifier(new PromiscuousVerifier());
sshClient.setConnectTimeout(connectionTimeout);
sshClient.setTimeout(timeout);
sshClient.loadKnownHosts();
} catch (IOException e) {
}
return sshClient;
}
}
|
# ... existing code ...
SSHClient sshClient = new SSHClient();
try {
sshClient.addHostKeyVerifier(new PromiscuousVerifier());
sshClient.setConnectTimeout(connectionTimeout);
sshClient.setTimeout(timeout);
sshClient.loadKnownHosts();
} catch (IOException e) {
}
# ... rest of the code ...
|
8b30ff1dfbb4131711553b644287076673e75aa6
|
zanata-common-api/src/main/java/org/zanata/rest/client/IProjectResource.java
|
zanata-common-api/src/main/java/org/zanata/rest/client/IProjectResource.java
|
package org.zanata.rest.client;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import org.jboss.resteasy.client.ClientResponse;
import org.zanata.rest.MediaTypes;
import org.zanata.rest.dto.Project;
import org.zanata.rest.service.ProjectResource;
//@Path("/projects/p/{projectSlug}")
public interface IProjectResource extends ProjectResource
{
@GET
@Produces( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse<Project> get();
@PUT
@Consumes( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse put(Project project);
}
|
package org.zanata.rest.client;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.jboss.resteasy.client.ClientResponse;
import org.zanata.rest.MediaTypes;
import org.zanata.rest.dto.Project;
import org.zanata.rest.service.ProjectResource;
//@Path("/projects/p/{projectSlug}")
public interface IProjectResource extends ProjectResource
{
@HEAD
@Produces({ MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_JSON, MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
public ClientResponse head();
@GET
@Produces( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse<Project> get();
@PUT
@Consumes( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse put(Project project);
}
|
Add Rest API restrictions to Project Iterations.
|
Add Rest API restrictions to Project Iterations.
(rhbz750673)
|
Java
|
lgpl-2.1
|
zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform
|
java
|
## Code Before:
package org.zanata.rest.client;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import org.jboss.resteasy.client.ClientResponse;
import org.zanata.rest.MediaTypes;
import org.zanata.rest.dto.Project;
import org.zanata.rest.service.ProjectResource;
//@Path("/projects/p/{projectSlug}")
public interface IProjectResource extends ProjectResource
{
@GET
@Produces( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse<Project> get();
@PUT
@Consumes( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse put(Project project);
}
## Instruction:
Add Rest API restrictions to Project Iterations.
(rhbz750673)
## Code After:
package org.zanata.rest.client;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.jboss.resteasy.client.ClientResponse;
import org.zanata.rest.MediaTypes;
import org.zanata.rest.dto.Project;
import org.zanata.rest.service.ProjectResource;
//@Path("/projects/p/{projectSlug}")
public interface IProjectResource extends ProjectResource
{
@HEAD
@Produces({ MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_JSON, MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
public ClientResponse head();
@GET
@Produces( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse<Project> get();
@PUT
@Consumes( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse put(Project project);
}
|
# ... existing code ...
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.jboss.resteasy.client.ClientResponse;
import org.zanata.rest.MediaTypes;
# ... modified code ...
public interface IProjectResource extends ProjectResource
{
@HEAD
@Produces({ MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_JSON, MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
public ClientResponse head();
@GET
@Produces( { MediaTypes.APPLICATION_ZANATA_PROJECT_XML, MediaTypes.APPLICATION_ZANATA_PROJECT_ITERATION_JSON })
public ClientResponse<Project> get();
# ... rest of the code ...
|
7a15de88ba3e92564b3b7bdf6ab6ea6fd246de44
|
include/platform/compiler/msvc.h
|
include/platform/compiler/msvc.h
|
// Clobber previous definitions with extreme prejudice
#ifdef UNUSED
# undef UNUSED
#endif
#ifdef likely
# undef likely
#endif
#ifdef unlikely
# undef unlikely
#endif
#ifdef alignment
# undef alignment
#endif
#define UNUSED __pragma(warning(disable:4100))
#define unlikely(x) (x)
#define likely(x) (x)
#define alignment(x) __declspec(align(x))
#if (_MSC_VER >= 1400)
# define restrict __restrict
#else
# define restrict
#endif
#if (MSC_VER <= 1500) && !defined(cplusplus)
# define inline __inline
#endif
#pragma warning(disable:4201 4214)
#ifndef HAVE_STDINT_H
# include "platform/os/stdint_msvc.h"
#endif
#if !defined(HAVE_STDBOOL_H) && !defined(cplusplus)
# include <Windows.h>
typedef BOOL bool;
# define true TRUE
# define false FALSE
#endif
|
// Clobber previous definitions with extreme prejudice
#ifdef UNUSED
# undef UNUSED
#endif
#ifdef likely
# undef likely
#endif
#ifdef unlikely
# undef unlikely
#endif
#ifdef alignment
# undef alignment
#endif
#define unlikely(x) (x)
#define likely(x) (x)
#define alignment(x) __declspec(align(x))
#if (_MSC_VER >= 1300)
# define UNUSED __pragma(warning(disable:4100))
#else
# define UNUSED
#endif
#if (_MSC_VER >= 1400)
# define restrict __restrict
#else
# define restrict
#endif
#if (MSC_VER <= 1500) && !defined(cplusplus)
# define inline __inline
#endif
#pragma warning(disable:4201 4214)
#ifndef HAVE_STDINT_H
# include "platform/os/stdint_msvc.h"
#endif
#if !defined(HAVE_STDBOOL_H) && !defined(cplusplus)
# include <Windows.h>
typedef BOOL bool;
# define true TRUE
# define false FALSE
#endif
|
Fix build on older MSVC.
|
Fix build on older MSVC.
|
C
|
bsd-3-clause
|
foxkit-us/supergameherm,supergameherm/supergameherm
|
c
|
## Code Before:
// Clobber previous definitions with extreme prejudice
#ifdef UNUSED
# undef UNUSED
#endif
#ifdef likely
# undef likely
#endif
#ifdef unlikely
# undef unlikely
#endif
#ifdef alignment
# undef alignment
#endif
#define UNUSED __pragma(warning(disable:4100))
#define unlikely(x) (x)
#define likely(x) (x)
#define alignment(x) __declspec(align(x))
#if (_MSC_VER >= 1400)
# define restrict __restrict
#else
# define restrict
#endif
#if (MSC_VER <= 1500) && !defined(cplusplus)
# define inline __inline
#endif
#pragma warning(disable:4201 4214)
#ifndef HAVE_STDINT_H
# include "platform/os/stdint_msvc.h"
#endif
#if !defined(HAVE_STDBOOL_H) && !defined(cplusplus)
# include <Windows.h>
typedef BOOL bool;
# define true TRUE
# define false FALSE
#endif
## Instruction:
Fix build on older MSVC.
## Code After:
// Clobber previous definitions with extreme prejudice
#ifdef UNUSED
# undef UNUSED
#endif
#ifdef likely
# undef likely
#endif
#ifdef unlikely
# undef unlikely
#endif
#ifdef alignment
# undef alignment
#endif
#define unlikely(x) (x)
#define likely(x) (x)
#define alignment(x) __declspec(align(x))
#if (_MSC_VER >= 1300)
# define UNUSED __pragma(warning(disable:4100))
#else
# define UNUSED
#endif
#if (_MSC_VER >= 1400)
# define restrict __restrict
#else
# define restrict
#endif
#if (MSC_VER <= 1500) && !defined(cplusplus)
# define inline __inline
#endif
#pragma warning(disable:4201 4214)
#ifndef HAVE_STDINT_H
# include "platform/os/stdint_msvc.h"
#endif
#if !defined(HAVE_STDBOOL_H) && !defined(cplusplus)
# include <Windows.h>
typedef BOOL bool;
# define true TRUE
# define false FALSE
#endif
|
# ... existing code ...
# undef alignment
#endif
#define unlikely(x) (x)
#define likely(x) (x)
#define alignment(x) __declspec(align(x))
#if (_MSC_VER >= 1300)
# define UNUSED __pragma(warning(disable:4100))
#else
# define UNUSED
#endif
#if (_MSC_VER >= 1400)
# define restrict __restrict
# ... rest of the code ...
|
c6f946d47fafbd01c5d607fc797cef88f85f0055
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='hyppocratic',
version='0.1',
description='Software to convert text files to EpiDoc compatible XML.',
author='Johathan Boyle, Nicolas Gruel',
packages=['CommentaryToEpidoc'],
install_requires=['docopt'],
entry_points={
'console_scripts': [
'CommentaryToEpidoc = hyppocratic.driver:main']
}
)
|
from setuptools import setup
setup(name='hyppocratic',
version='0.1',
description='Software to convert text files to EpiDoc compatible XML.',
author='Johathan Boyle, Nicolas Gruel',
packages=['hyppocratic'],
install_requires=['docopt'],
entry_points={
'console_scripts': [
'CommentaryToEpidoc = hyppocratic.driver:main']
}
)
|
Correct installation script to use the correct name for the package.
|
Correct installation script to use the correct name for the package.
Bug solved
|
Python
|
bsd-3-clause
|
gruel/AphorismToTEI
|
python
|
## Code Before:
from setuptools import setup
setup(name='hyppocratic',
version='0.1',
description='Software to convert text files to EpiDoc compatible XML.',
author='Johathan Boyle, Nicolas Gruel',
packages=['CommentaryToEpidoc'],
install_requires=['docopt'],
entry_points={
'console_scripts': [
'CommentaryToEpidoc = hyppocratic.driver:main']
}
)
## Instruction:
Correct installation script to use the correct name for the package.
Bug solved
## Code After:
from setuptools import setup
setup(name='hyppocratic',
version='0.1',
description='Software to convert text files to EpiDoc compatible XML.',
author='Johathan Boyle, Nicolas Gruel',
packages=['hyppocratic'],
install_requires=['docopt'],
entry_points={
'console_scripts': [
'CommentaryToEpidoc = hyppocratic.driver:main']
}
)
|
...
version='0.1',
description='Software to convert text files to EpiDoc compatible XML.',
author='Johathan Boyle, Nicolas Gruel',
packages=['hyppocratic'],
install_requires=['docopt'],
entry_points={
'console_scripts': [
...
|
36101e7b0c4a8305c0d787de185e8d8cd85c4887
|
beansdk/src/main/java/com/punchthrough/bean/sdk/message/BeanError.java
|
beansdk/src/main/java/com/punchthrough/bean/sdk/message/BeanError.java
|
package com.punchthrough.bean.sdk.message;
/**
* Represents an error returned by the Bean.
*/
public enum BeanError {
/**
* Timed out while waiting for state to update during programming, but before sending chunks
*/
STATE_TIMEOUT,
/**
* Bean did not provide a reason for the error
*/
UNKNOWN
}
|
package com.punchthrough.bean.sdk.message;
/**
* Represents an error returned by the Bean.
*/
public enum BeanError {
/**
* Timed out during sketch programming, before sending chunks: Bean took too long to update its
* current state
*/
STATE_TIMEOUT,
/**
* Timed out configuring OAD characteristics
*/
OAD_CONFIG_TIMEOUT,
/**
* Timed out requesting current firmware version
*/
CURR_FW_VER_TIMEOUT,
/**
* Timed out starting firmware download
*/
FW_START_TIMEOUT,
/**
* Timed out while sending firmware packets
*/
FW_DOWNLOAD_TIMEOUT,
/**
* Bean did not provide a reason for the error
*/
UNKNOWN
}
|
Add errors for firmware upload states
|
Add errors for firmware upload states
|
Java
|
mit
|
colus001/Bean-Android-SDK,androidgrl/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,PunchThrough/bean-sdk-android,hongbinz/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,PunchThrough/bean-sdk-android,swstack/Bean-Android-SDK,swstack/Bean-Android-SDK,hongbinz/Bean-Android-SDK,androidgrl/Bean-Android-SDK,colus001/Bean-Android-SDK
|
java
|
## Code Before:
package com.punchthrough.bean.sdk.message;
/**
* Represents an error returned by the Bean.
*/
public enum BeanError {
/**
* Timed out while waiting for state to update during programming, but before sending chunks
*/
STATE_TIMEOUT,
/**
* Bean did not provide a reason for the error
*/
UNKNOWN
}
## Instruction:
Add errors for firmware upload states
## Code After:
package com.punchthrough.bean.sdk.message;
/**
* Represents an error returned by the Bean.
*/
public enum BeanError {
/**
* Timed out during sketch programming, before sending chunks: Bean took too long to update its
* current state
*/
STATE_TIMEOUT,
/**
* Timed out configuring OAD characteristics
*/
OAD_CONFIG_TIMEOUT,
/**
* Timed out requesting current firmware version
*/
CURR_FW_VER_TIMEOUT,
/**
* Timed out starting firmware download
*/
FW_START_TIMEOUT,
/**
* Timed out while sending firmware packets
*/
FW_DOWNLOAD_TIMEOUT,
/**
* Bean did not provide a reason for the error
*/
UNKNOWN
}
|
...
*/
public enum BeanError {
/**
* Timed out during sketch programming, before sending chunks: Bean took too long to update its
* current state
*/
STATE_TIMEOUT,
/**
* Timed out configuring OAD characteristics
*/
OAD_CONFIG_TIMEOUT,
/**
* Timed out requesting current firmware version
*/
CURR_FW_VER_TIMEOUT,
/**
* Timed out starting firmware download
*/
FW_START_TIMEOUT,
/**
* Timed out while sending firmware packets
*/
FW_DOWNLOAD_TIMEOUT,
/**
* Bean did not provide a reason for the error
*/
...
|
bc20e8d01dc154d45f9dfc8f2b610d415a40f253
|
broadbean/__init__.py
|
broadbean/__init__.py
|
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
|
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
|
Remove import of version 1.0 feature
|
Remove import of version 1.0 feature
|
Python
|
mit
|
WilliamHPNielsen/broadbean
|
python
|
## Code Before:
from . import ripasso
from .element import Element
from .segment import Segment
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
## Instruction:
Remove import of version 1.0 feature
## Code After:
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
from .broadbean import PulseAtoms
|
// ... existing code ...
from . import ripasso
from .element import Element
from .sequence import Sequence
from .blueprint import BluePrint
from .tools import makeVaryingSequence, repeatAndVarySequence
// ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.