commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | config | content | fuzzy_diff
---|---|---|---|---|---|---|---|---|---|---|---|---|
6b9cc519deaecd093087d5190888b97b7b7eaf02
|
icekit/project/settings/_production.py
|
icekit/project/settings/_production.py
|
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
# EMAIL_HOST = ''
# EMAIL_HOST_USER = ''
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
# AWS_ACCESS_KEY_ID = ''
# AWS_STORAGE_BUCKET_NAME = ''
ENABLE_S3_MEDIA = True
|
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
ENABLE_S3_MEDIA = True
|
Remove vestigial hard coded production settings. These should be defined in a dotenv file, now.
|
Remove vestigial hard coded production settings. These should be defined in a dotenv file, now.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
python
|
## Code Before:
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
# EMAIL_HOST = ''
# EMAIL_HOST_USER = ''
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
# AWS_ACCESS_KEY_ID = ''
# AWS_STORAGE_BUCKET_NAME = ''
ENABLE_S3_MEDIA = True
## Instruction:
Remove vestigial hard coded production settings. These should be defined in a dotenv file, now.
## Code After:
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
ENABLE_S3_MEDIA = True
|
# ... existing code ...
'LOCATION': 'redis://redis:6379/1',
})
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
# ... modified code ...
# STORAGES ####################################################################
ENABLE_S3_MEDIA = True
# ... rest of the code ...
|
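The record above strips hard-coded `EMAIL_*` and `AWS_*` values from the production settings so they can be supplied through a dotenv file instead. As a hedged illustration of that pattern (not part of the icekit codebase), the same names could be read from environment variables in a Django settings module; `os.environ` is used here as an assumption, since the project may rely on its own dotenv loader:

```python
import os

# Hypothetical sketch: the values removed from _production.py are now
# expected to arrive via the environment (e.g. loaded from a .env file).
EMAIL_HOST = os.environ.get('EMAIL_HOST', '')
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER', '')
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME', '')
```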
5acee7067df2af2b351bfb4b5757b4d53f023d32
|
radio/management/commands/export_talkgroups.py
|
radio/management/commands/export_talkgroups.py
|
import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
|
import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
|
Add system support to export talkgroups
|
Add system support to export talkgroups
|
Python
|
mit
|
ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player
|
python
|
## Code Before:
import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
def handle(self, *args, **options):
f_name = options['file']
export_tg_file(f_name)
def export_tg_file(file_name):
''' Using the talkgroup file from trunk-recorder'''
talkgroups = TalkGroup.objects.all()
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
## Instruction:
Add system support to export talkgroups
## Code After:
import sys
import datetime
import csv
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from radio.models import *
class Command(BaseCommand):
help = 'Import talkgroup info'
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
description = t.description
hex_val = str(hex(t.dec_id))[2:-1].zfill(3)
try:
alpha = alpha.rstrip()
except AttributeError:
pass
try:
description = description.rstrip()
except AttributeError:
pass
common = ''
if(t.common_name):
common = t.common_name
tg_file.write("{},{},{},{},{},{}\n".format(t.dec_id,hex_val,t.mode,alpha,description,t.priority))
|
# ... existing code ...
def add_arguments(self, parser):
parser.add_argument('file')
parser.add_argument(
'--system',
type=int,
default=-1,
help='Export talkgroups from only this system',
)
def handle(self, *args, **options):
export_tg_file(options)
def export_tg_file(options):
''' Using the talkgroup file from trunk-recorder'''
file_name = options['file']
system = options['system']
talkgroups = TalkGroup.objects.all()
if system >= 0:
talkgroups = talkgroups.filter(system=system)
with open(file_name, "w") as tg_file:
for t in talkgroups:
alpha = t.alpha_tag
# ... rest of the code ...
|
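The change above adds an optional `--system` filter to the export command alongside the existing `file` argument. A rough usage sketch via Django's `call_command` follows; the command name is inferred from the module name `export_talkgroups.py`, and the file names and system id are illustrative only:

```python
from django.core.management import call_command

# Export every talkgroup to a file.
call_command('export_talkgroups', 'talkgroups.txt')

# Export only talkgroups that belong to system 1 (illustrative id);
# any negative value keeps the unfiltered behaviour.
call_command('export_talkgroups', 'system1_talkgroups.txt', system=1)
```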
2e267050997a95c35061fa68b522b0e9c225de2c
|
src/main/java/com/example/client/Dragula.java
|
src/main/java/com/example/client/Dragula.java
|
package com.example.client;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsType;
@JsType(isNative = true)
public class Dragula {
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element... elements);
}
|
package com.example.client;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
@JsType(isNative = true)
public class Dragula {
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element... elements);
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element[] elements, DragulaOptions options);
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(DragulaOptions options);
@JsOverlay
public static Dragula dragula(Element element, DragulaOptions options) {
return dragula(new Element[] { element }, options);
}
@JsOverlay
public static Dragula dragula(Element element, Element element2, DragulaOptions options) {
return dragula(new Element[] { element, element2 }, options);
}
@JsOverlay
public static Dragula dragula(Element element, Element element2, Element element3, DragulaOptions options) {
return dragula(new Element[] { element, element2, element3 }, options);
}
@JsOverlay
public static Dragula dragula(DragulaOptions options, Element... elements) {
return dragula(elements, options);
}
@JsProperty
public native JsArray<Element> getContainers();
}
|
Add more methods to dragula
|
Add more methods to dragula
|
Java
|
mit
|
ArloL/gwt-dragula-test,ArloL/gwt-dragula-test
|
java
|
## Code Before:
package com.example.client;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsType;
@JsType(isNative = true)
public class Dragula {
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element... elements);
}
## Instruction:
Add more methods to dragula
## Code After:
package com.example.client;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
@JsType(isNative = true)
public class Dragula {
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element... elements);
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element[] elements, DragulaOptions options);
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(DragulaOptions options);
@JsOverlay
public static Dragula dragula(Element element, DragulaOptions options) {
return dragula(new Element[] { element }, options);
}
@JsOverlay
public static Dragula dragula(Element element, Element element2, DragulaOptions options) {
return dragula(new Element[] { element, element2 }, options);
}
@JsOverlay
public static Dragula dragula(Element element, Element element2, Element element3, DragulaOptions options) {
return dragula(new Element[] { element, element2, element3 }, options);
}
@JsOverlay
public static Dragula dragula(DragulaOptions options, Element... elements) {
return dragula(elements, options);
}
@JsProperty
public native JsArray<Element> getContainers();
}
|
// ... existing code ...
package com.example.client;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
@JsType(isNative = true)
// ... modified code ...
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element... elements);
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(Element[] elements, DragulaOptions options);
@JsMethod(namespace = JsPackage.GLOBAL)
public native static Dragula dragula(DragulaOptions options);
@JsOverlay
public static Dragula dragula(Element element, DragulaOptions options) {
return dragula(new Element[] { element }, options);
}
@JsOverlay
public static Dragula dragula(Element element, Element element2, DragulaOptions options) {
return dragula(new Element[] { element, element2 }, options);
}
@JsOverlay
public static Dragula dragula(Element element, Element element2, Element element3, DragulaOptions options) {
return dragula(new Element[] { element, element2, element3 }, options);
}
@JsOverlay
public static Dragula dragula(DragulaOptions options, Element... elements) {
return dragula(elements, options);
}
@JsProperty
public native JsArray<Element> getContainers();
}
// ... rest of the code ...
|
434d72abc546769283e253c3114588b454d798d9
|
OpERP/src/main/java/devopsdistilled/operp/server/data/entity/items/Category.java
|
OpERP/src/main/java/devopsdistilled/operp/server/data/entity/items/Category.java
|
package devopsdistilled.operp.server.data.entity.items;
import java.io.Serializable;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import devopsdistilled.operp.server.data.entity.Entiti;
@Entity
public class Category extends Entiti implements Serializable {
private static final long serialVersionUID = -3809686715120885998L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long categoryId;
private String categoryName;
@ManyToMany(mappedBy = "categories")
private List<Product> products;
public Long getCategoryId() {
return categoryId;
}
public void setCategoryId(Long categoryId) {
this.categoryId = categoryId;
}
public String getCategoryName() {
return categoryName;
}
public void setCategoryName(String categoryName) {
this.categoryName = categoryName;
}
public List<Product> getProducts() {
return products;
}
public void setProducts(List<Product> products) {
this.products = products;
}
}
|
package devopsdistilled.operp.server.data.entity.items;
import java.io.Serializable;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import devopsdistilled.operp.server.data.entity.Entiti;
@Entity
public class Category extends Entiti implements Serializable {
private static final long serialVersionUID = -3809686715120885998L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long categoryId;
@Column(unique = true)
private String categoryName;
@ManyToMany(mappedBy = "categories")
private List<Product> products;
public Long getCategoryId() {
return categoryId;
}
public void setCategoryId(Long categoryId) {
this.categoryId = categoryId;
}
public String getCategoryName() {
return categoryName;
}
public void setCategoryName(String categoryName) {
this.categoryName = categoryName;
}
public List<Product> getProducts() {
return products;
}
public void setProducts(List<Product> products) {
this.products = products;
}
}
|
Enforce unique to column categoryName
|
Enforce unique to column categoryName
OPEN - task 54: Create Category CRUD packages
http://github.com/DevOpsDistilled/OpERP/issues/issue/54
|
Java
|
mit
|
njmube/OpERP,DevOpsDistilled/OpERP
|
java
|
## Code Before:
package devopsdistilled.operp.server.data.entity.items;
import java.io.Serializable;
import java.util.List;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import devopsdistilled.operp.server.data.entity.Entiti;
@Entity
public class Category extends Entiti implements Serializable {
private static final long serialVersionUID = -3809686715120885998L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long categoryId;
private String categoryName;
@ManyToMany(mappedBy = "categories")
private List<Product> products;
public Long getCategoryId() {
return categoryId;
}
public void setCategoryId(Long categoryId) {
this.categoryId = categoryId;
}
public String getCategoryName() {
return categoryName;
}
public void setCategoryName(String categoryName) {
this.categoryName = categoryName;
}
public List<Product> getProducts() {
return products;
}
public void setProducts(List<Product> products) {
this.products = products;
}
}
## Instruction:
Enforce unique to column categoryName
OPEN - task 54: Create Category CRUD packages
http://github.com/DevOpsDistilled/OpERP/issues/issue/54
## Code After:
package devopsdistilled.operp.server.data.entity.items;
import java.io.Serializable;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import devopsdistilled.operp.server.data.entity.Entiti;
@Entity
public class Category extends Entiti implements Serializable {
private static final long serialVersionUID = -3809686715120885998L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long categoryId;
@Column(unique = true)
private String categoryName;
@ManyToMany(mappedBy = "categories")
private List<Product> products;
public Long getCategoryId() {
return categoryId;
}
public void setCategoryId(Long categoryId) {
this.categoryId = categoryId;
}
public String getCategoryName() {
return categoryName;
}
public void setCategoryName(String categoryName) {
this.categoryName = categoryName;
}
public List<Product> getProducts() {
return products;
}
public void setProducts(List<Product> products) {
this.products = products;
}
}
|
// ... existing code ...
import java.io.Serializable;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
// ... modified code ...
@GeneratedValue(strategy = GenerationType.AUTO)
private Long categoryId;
@Column(unique = true)
private String categoryName;
@ManyToMany(mappedBy = "categories")
// ... rest of the code ...
|
5ee7f1981c508eaaefca2a27803cf531d08f2120
|
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/dc/RemoveNetworkCommand.java
|
backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/network/dc/RemoveNetworkCommand.java
|
package org.ovirt.engine.core.bll.network.dc;
import org.ovirt.engine.core.bll.validator.NetworkValidator;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.action.AddNetworkStoragePoolParameters;
import org.ovirt.engine.core.common.errors.VdcBllMessages;
public class RemoveNetworkCommand<T extends AddNetworkStoragePoolParameters> extends NetworkCommon<T> {
public RemoveNetworkCommand(T parameters) {
super(parameters);
}
@Override
protected void executeCommand() {
getNetworkDAO().remove(getNetwork().getId());
setSucceeded(true);
}
@Override
protected void setActionMessageParameters() {
super.setActionMessageParameters();
addCanDoActionMessage(VdcBllMessages.VAR__ACTION__REMOVE);
}
@Override
protected boolean canDoAction() {
NetworkValidator validator = new NetworkValidator(getNetworkDAO().get(getNetwork().getId()));
return validate(validator.networkIsSet())
&& validate(validator.notManagementNetwork())
&& validate(validator.networkNotUsedByVms())
&& validate(validator.networkNotUsedByTemplates());
}
@Override
public AuditLogType getAuditLogTypeValue() {
return getSucceeded() ? AuditLogType.NETWORK_REMOVE_NETWORK : AuditLogType.NETWORK_REMOVE_NETWORK_FAILED;
}
}
|
package org.ovirt.engine.core.bll.network.dc;
import org.ovirt.engine.core.bll.validator.NetworkValidator;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.action.AddNetworkStoragePoolParameters;
import org.ovirt.engine.core.common.errors.VdcBllMessages;
public class RemoveNetworkCommand<T extends AddNetworkStoragePoolParameters> extends NetworkCommon<T> {
public RemoveNetworkCommand(T parameters) {
super(parameters);
}
@Override
protected void executeCommand() {
removeVnicProfiles();
getNetworkDAO().remove(getNetwork().getId());
setSucceeded(true);
}
@Override
protected void setActionMessageParameters() {
super.setActionMessageParameters();
addCanDoActionMessage(VdcBllMessages.VAR__ACTION__REMOVE);
}
@Override
protected boolean canDoAction() {
NetworkValidator validator = new NetworkValidator(getNetworkDAO().get(getNetwork().getId()));
return validate(validator.networkIsSet())
&& validate(validator.notManagementNetwork())
&& validate(validator.networkNotUsedByVms())
&& validate(validator.networkNotUsedByTemplates());
}
@Override
public AuditLogType getAuditLogTypeValue() {
return getSucceeded() ? AuditLogType.NETWORK_REMOVE_NETWORK : AuditLogType.NETWORK_REMOVE_NETWORK_FAILED;
}
}
|
Delete vnic profiles when removing a network
|
engine: Delete vnic profiles when removing a network
The patch removes vnic profiles when the network is
being deleted from the system.
Change-Id: I6472fb81262d7eb2cf6886a42ba17a637c804b22
Signed-off-by: Moti Asayag <[email protected]>
|
Java
|
apache-2.0
|
OpenUniversity/ovirt-engine,yapengsong/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,halober/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,halober/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,halober/ovirt-engine,walteryang47/ovirt-engine,eayun/ovirt-engine,eayun/ovirt-engine,yingyun001/ovirt-engine,zerodengxinchao/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,yapengsong/ovirt-engine,zerodengxinchao/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,OpenUniversity/ovirt-engine,halober/ovirt-engine,zerodengxinchao/ovirt-engine
|
java
|
## Code Before:
package org.ovirt.engine.core.bll.network.dc;
import org.ovirt.engine.core.bll.validator.NetworkValidator;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.action.AddNetworkStoragePoolParameters;
import org.ovirt.engine.core.common.errors.VdcBllMessages;
public class RemoveNetworkCommand<T extends AddNetworkStoragePoolParameters> extends NetworkCommon<T> {
public RemoveNetworkCommand(T parameters) {
super(parameters);
}
@Override
protected void executeCommand() {
getNetworkDAO().remove(getNetwork().getId());
setSucceeded(true);
}
@Override
protected void setActionMessageParameters() {
super.setActionMessageParameters();
addCanDoActionMessage(VdcBllMessages.VAR__ACTION__REMOVE);
}
@Override
protected boolean canDoAction() {
NetworkValidator validator = new NetworkValidator(getNetworkDAO().get(getNetwork().getId()));
return validate(validator.networkIsSet())
&& validate(validator.notManagementNetwork())
&& validate(validator.networkNotUsedByVms())
&& validate(validator.networkNotUsedByTemplates());
}
@Override
public AuditLogType getAuditLogTypeValue() {
return getSucceeded() ? AuditLogType.NETWORK_REMOVE_NETWORK : AuditLogType.NETWORK_REMOVE_NETWORK_FAILED;
}
}
## Instruction:
engine: Delete vnic profiles when removing a network
The patch removes vnic profiles when the network is
being deleted from the system.
Change-Id: I6472fb81262d7eb2cf6886a42ba17a637c804b22
Signed-off-by: Moti Asayag <[email protected]>
## Code After:
package org.ovirt.engine.core.bll.network.dc;
import org.ovirt.engine.core.bll.validator.NetworkValidator;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.action.AddNetworkStoragePoolParameters;
import org.ovirt.engine.core.common.errors.VdcBllMessages;
public class RemoveNetworkCommand<T extends AddNetworkStoragePoolParameters> extends NetworkCommon<T> {
public RemoveNetworkCommand(T parameters) {
super(parameters);
}
@Override
protected void executeCommand() {
removeVnicProfiles();
getNetworkDAO().remove(getNetwork().getId());
setSucceeded(true);
}
@Override
protected void setActionMessageParameters() {
super.setActionMessageParameters();
addCanDoActionMessage(VdcBllMessages.VAR__ACTION__REMOVE);
}
@Override
protected boolean canDoAction() {
NetworkValidator validator = new NetworkValidator(getNetworkDAO().get(getNetwork().getId()));
return validate(validator.networkIsSet())
&& validate(validator.notManagementNetwork())
&& validate(validator.networkNotUsedByVms())
&& validate(validator.networkNotUsedByTemplates());
}
@Override
public AuditLogType getAuditLogTypeValue() {
return getSucceeded() ? AuditLogType.NETWORK_REMOVE_NETWORK : AuditLogType.NETWORK_REMOVE_NETWORK_FAILED;
}
}
|
...
@Override
protected void executeCommand() {
removeVnicProfiles();
getNetworkDAO().remove(getNetwork().getId());
setSucceeded(true);
}
...
|
9a8544eaccde1420e6cbac7b4c5115155d6402f3
|
django_docutils/__about__.py
|
django_docutils/__about__.py
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
Add github + pypi to metadata
|
Add github + pypi to metadata
|
Python
|
mit
|
tony/django-docutils,tony/django-docutils
|
python
|
## Code Before:
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
## Instruction:
Add github + pypi to metadata
## Code After:
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
// ... existing code ...
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = '[email protected]'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
// ... rest of the code ...
|
99b72ab4e40a4ffca901b36d870947ffb5103da8
|
HadithHouseWebsite/textprocessing/regex.py
|
HadithHouseWebsite/textprocessing/regex.py
|
import re
class DocScanner(object):
"""
A class used to find certain tokens in a given document. The tokens can be
specified by regular expressions.
"""
def __init__(self, tokens_dict, callback):
"""
Initialize a new document scanner.
:param tokens_dict: A dictionary whose keys are the types of tokens and
values are the regex for finding such types of tokens.
:param callback: A function to be called whenever a token is found.
"""
self.types = list(tokens_dict.keys())
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document):
prev_match = None
prev_type = None
for curr_match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if curr_match.group(type) is not None:
self.callback(
type,
prev_type,
curr_match,
prev_match,
document
)
break
|
import re
class DocScanner(object):
"""
A class used to find certain tokens in a given document. The tokens can be
specified by regular expressions.
"""
def __init__(self, tokens_dict, callback):
"""
Initialize a new document scanner.
:param tokens_dict: A dictionary whose keys are the types of tokens and
values are the regex for finding such types of tokens.
:param callback: A function to be called whenever a token is found.
"""
self.types = list(tokens_dict.keys())
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document, context=None):
prev_match = None
prev_type = None
for match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if match.group(type) is not None:
self.callback(
type,
prev_type,
match,
prev_match,
document,
context
)
prev_type = type
prev_match = match
break
|
Support passing context to callback
|
feat(docscanner): Support passing context to callback
It might be useful to send some additional parameters to the callback
function. For example, you might want to write to a file in the
callback. This commit allows the user to pass an optional context to
the callback everytime it finds a match.
|
Python
|
mit
|
hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse
|
python
|
## Code Before:
import re
class DocScanner(object):
"""
A class used to find certain tokens in a given document. The tokens can be
specified by regular expressions.
"""
def __init__(self, tokens_dict, callback):
"""
Initialize a new document scanner.
:param tokens_dict: A dictionary whose keys are the types of tokens and
values are the regex for finding such types of tokens.
:param callback: A function to be called whenever a token is found.
"""
self.types = list(tokens_dict.keys())
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document):
prev_match = None
prev_type = None
for curr_match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if curr_match.group(type) is not None:
self.callback(
type,
prev_type,
curr_match,
prev_match,
document
)
break
## Instruction:
feat(docscanner): Support passing context to callback
It might be useful to send some additional parameters to the callback
function. For example, you might want to write to a file in the
callback. This commit allows the user to pass an optional context to
the callback everytime it finds a match.
## Code After:
import re
class DocScanner(object):
"""
A class used to find certain tokens in a given document. The tokens can be
specified by regular expressions.
"""
def __init__(self, tokens_dict, callback):
"""
Initialize a new document scanner.
:param tokens_dict: A dictionary whose keys are the types of tokens and
values are the regex for finding such types of tokens.
:param callback: A function to be called whenever a token is found.
"""
self.types = list(tokens_dict.keys())
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document, context=None):
prev_match = None
prev_type = None
for match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if match.group(type) is not None:
self.callback(
type,
prev_type,
match,
prev_match,
document,
context
)
prev_type = type
prev_match = match
break
|
# ... existing code ...
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document, context=None):
prev_match = None
prev_type = None
for match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if match.group(type) is not None:
self.callback(
type,
prev_type,
match,
prev_match,
document,
context
)
prev_type = type
prev_match = match
break
# ... rest of the code ...
|
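The new `context` argument is handed straight through to the callback as its last parameter, which is what makes the file-writing use case from the commit message possible. Below is a short sketch under the assumption that the class is importable as `textprocessing.regex.DocScanner`; the token patterns and callback body are invented for the example:

```python
from textprocessing.regex import DocScanner  # import path inferred from the file location

def on_token(type, prev_type, match, prev_match, document, context):
    # `context` is whatever object was passed to scan(); here, an open file.
    context.write('%s: %s\n' % (type, match.group(type)))

scanner = DocScanner({'number': r'\d+', 'word': r'[A-Za-z]+'}, on_token)
with open('tokens.log', 'w') as log:
    scanner.scan('chapter 7 has 42 pages', context=log)
```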
013d0e3b2d8fdc212ae63f635a1e8def988672fa
|
tests/structures/test_sequences.py
|
tests/structures/test_sequences.py
|
import unittest
from ..utils import TranspileTestCase
class SequenceTests(TranspileTestCase):
def test_unpack_sequence(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b, c = x
print(a)
print(b)
print(c)
""")
@unittest.skip('Feature not yet implemented')
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b = x
print(a)
print(b)
""")
@unittest.skip('Feature not yet implemented')
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
a, b, c = x
print(a)
print(b)
print(c)
""")
|
import unittest
from ..utils import TranspileTestCase
class SequenceTests(TranspileTestCase):
def test_unpack_sequence(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b, c = x
print(a)
print(b)
print(c)
""")
@unittest.expectedFailure
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b = x
print(a)
print(b)
""")
@unittest.expectedFailure
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
a, b, c = x
print(a)
print(b)
print(c)
""")
|
Convert some skips to expected failures.
|
Convert some skips to expected failures.
|
Python
|
bsd-3-clause
|
glasnt/voc,freakboy3742/voc,ASP1234/voc,ASP1234/voc,Felix5721/voc,pombredanne/voc,pombredanne/voc,Felix5721/voc,cflee/voc,cflee/voc,gEt-rIgHt-jR/voc,glasnt/voc,gEt-rIgHt-jR/voc,freakboy3742/voc
|
python
|
## Code Before:
import unittest
from ..utils import TranspileTestCase
class SequenceTests(TranspileTestCase):
def test_unpack_sequence(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b, c = x
print(a)
print(b)
print(c)
""")
@unittest.skip('Feature not yet implemented')
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b = x
print(a)
print(b)
""")
@unittest.skip('Feature not yet implemented')
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
a, b, c = x
print(a)
print(b)
print(c)
""")
## Instruction:
Convert some skips to expected failures.
## Code After:
import unittest
from ..utils import TranspileTestCase
class SequenceTests(TranspileTestCase):
def test_unpack_sequence(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b, c = x
print(a)
print(b)
print(c)
""")
@unittest.expectedFailure
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
a, b = x
print(a)
print(b)
""")
@unittest.expectedFailure
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
a, b, c = x
print(a)
print(b)
print(c)
""")
|
# ... existing code ...
print(c)
""")
@unittest.expectedFailure
def test_unpack_sequence_overflow(self):
self.assertCodeExecution("""
x = [1, 2, 3]
# ... modified code ...
print(b)
""")
@unittest.expectedFailure
def test_unpack_sequence_underflow(self):
self.assertCodeExecution("""
x = [1, 2]
# ... rest of the code ...
|
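Swapping `@unittest.skip` for `@unittest.expectedFailure` means these tests keep running: they are reported as expected failures while unpacking overflow/underflow is unimplemented, and flip to an "unexpected success" once it works. A minimal standalone illustration of that behaviour (unrelated to the VOC suite itself):

```python
import unittest

class Example(unittest.TestCase):
    @unittest.expectedFailure
    def test_not_implemented_yet(self):
        # Still executes; reported as an expected failure rather than skipped,
        # and flagged as an unexpected success once the assertion passes.
        self.assertEqual(1 + 1, 3)

if __name__ == '__main__':
    unittest.main()
```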
322b07ff24932fb0e59114e30620e47501a84191
|
core/imt/inc/LinkDef.h
|
core/imt/inc/LinkDef.h
|
// Only for the autoload, autoparse. No IO of these classes is foreseen!
#pragma link C++ class ROOT::Internal::TPoolManager-;
#pragma link C++ class ROOT::TThreadExecutor-;
#pragma link C++ class ROOT::Experimental::TTaskGroup-;
#endif
|
// Only for the autoload, autoparse. No IO of these classes is foreseen!
// Exclude in case ROOT does not have IMT support
#ifdef R__USE_IMT
#pragma link C++ class ROOT::Internal::TPoolManager-;
#pragma link C++ class ROOT::TThreadExecutor-;
#pragma link C++ class ROOT::Experimental::TTaskGroup-;
#endif
#endif
|
Fix warning during dictionary generation in no-imt builds
|
[IMT] Fix warning during dictionary generation in no-imt builds
|
C
|
lgpl-2.1
|
karies/root,olifre/root,olifre/root,karies/root,karies/root,olifre/root,olifre/root,olifre/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,root-mirror/root,root-mirror/root,olifre/root,karies/root,karies/root,karies/root,root-mirror/root,root-mirror/root,olifre/root,karies/root,karies/root,karies/root,root-mirror/root,olifre/root,karies/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,karies/root
|
c
|
## Code Before:
// Only for the autoload, autoparse. No IO of these classes is foreseen!
#pragma link C++ class ROOT::Internal::TPoolManager-;
#pragma link C++ class ROOT::TThreadExecutor-;
#pragma link C++ class ROOT::Experimental::TTaskGroup-;
#endif
## Instruction:
[IMT] Fix warning during dictionary generation in no-imt builds
## Code After:
// Only for the autoload, autoparse. No IO of these classes is foreseen!
// Exclude in case ROOT does not have IMT support
#ifdef R__USE_IMT
#pragma link C++ class ROOT::Internal::TPoolManager-;
#pragma link C++ class ROOT::TThreadExecutor-;
#pragma link C++ class ROOT::Experimental::TTaskGroup-;
#endif
#endif
|
...
// Only for the autoload, autoparse. No IO of these classes is foreseen!
// Exclude in case ROOT does not have IMT support
#ifdef R__USE_IMT
#pragma link C++ class ROOT::Internal::TPoolManager-;
#pragma link C++ class ROOT::TThreadExecutor-;
#pragma link C++ class ROOT::Experimental::TTaskGroup-;
#endif
#endif
...
|
3131f282d6ad1a703939c91c0d7dc0b3e4e54046
|
iati/versions.py
|
iati/versions.py
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number."""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
Document the current state of the Version class.
|
Document the current state of the Version class.
|
Python
|
mit
|
IATI/iati.core,IATI/iati.core
|
python
|
## Code Before:
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number."""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
## Instruction:
Document the current state of the Version class.
## Code After:
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
if not version_string in iati.constants.STANDARD_VERSIONS:
raise ValueError('A valid version number must be specified.')
|
// ... existing code ...
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
// ... rest of the code ...
|
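Per the docstring added above, the constructor raises `TypeError` for non-strings and `ValueError` for strings that are not recognised version numbers. A hypothetical usage sketch, assuming the module is importable as `iati.versions` and that the chosen string appears in `iati.constants.STANDARD_VERSIONS`:

```python
import iati.versions

try:
    version = iati.versions.Version('2.02')  # illustrative version string
except TypeError:
    print('Version() must be given a string')
except ValueError:
    print('Not a known IATI Standard version number')
```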
110c362e3e8436700707c2306d115b3b2476a79d
|
core/models.py
|
core/models.py
|
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve
from django.conf import settings
from social_auth.signals import socialauth_registered
def create_profile(sender, user, response, details, **kwargs):
try:
# twitter
photo_url = response["profile_image_url"]
photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
except KeyError:
# facebook
photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
path = join(settings.MEDIA_ROOT, "photos")
if not exists(path):
makedirs(path)
urlretrieve(photo_url, join(path, str(user.id)))
socialauth_registered.connect(create_profile, sender=None)
|
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from social_auth.signals import socialauth_registered
class Account(models.Model):
"""
A user's account balance.
"""
user = models.OneToOneField(User)
balance = models.IntegerField(default=5000)
@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
"""
Create an initial account balance for new users.
"""
Account.objects.get_or_create(user=kwargs["instance"])
@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
"""
Download the user's Twitter or Facebook avatar once they've
authenticated via either service.
"""
try:
# twitter
photo_url = response["profile_image_url"]
photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
except KeyError:
# facebook
photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
path = join(settings.MEDIA_ROOT, "photos")
if not exists(path):
makedirs(path)
urlretrieve(photo_url, join(path, str(user.id)))
|
Add initial account balance for users.
|
Add initial account balance for users.
|
Python
|
bsd-2-clause
|
stephenmcd/gamblor,stephenmcd/gamblor
|
python
|
## Code Before:
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve
from django.conf import settings
from social_auth.signals import socialauth_registered
def create_profile(sender, user, response, details, **kwargs):
try:
# twitter
photo_url = response["profile_image_url"]
photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
except KeyError:
# facebook
photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
path = join(settings.MEDIA_ROOT, "photos")
if not exists(path):
makedirs(path)
urlretrieve(photo_url, join(path, str(user.id)))
socialauth_registered.connect(create_profile, sender=None)
## Instruction:
Add initial account balance for users.
## Code After:
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from social_auth.signals import socialauth_registered
class Account(models.Model):
"""
A user's account balance.
"""
user = models.OneToOneField(User)
balance = models.IntegerField(default=5000)
@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
"""
Create an initial account balance for new users.
"""
Account.objects.get_or_create(user=kwargs["instance"])
@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
"""
Download the user's Twitter or Facebook avatar once they've
authenticated via either service.
"""
try:
# twitter
photo_url = response["profile_image_url"]
photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
except KeyError:
# facebook
photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
path = join(settings.MEDIA_ROOT, "photos")
if not exists(path):
makedirs(path)
urlretrieve(photo_url, join(path, str(user.id)))
|
// ... existing code ...
from urllib import urlretrieve
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from social_auth.signals import socialauth_registered
class Account(models.Model):
"""
A user's account balance.
"""
user = models.OneToOneField(User)
balance = models.IntegerField(default=5000)
@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
"""
Create an initial account balance for new users.
"""
Account.objects.get_or_create(user=kwargs["instance"])
@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
"""
Download the user's Twitter or Facebook avatar once they've
authenticated via either service.
"""
try:
# twitter
photo_url = response["profile_image_url"]
// ... modified code ...
if not exists(path):
makedirs(path)
urlretrieve(photo_url, join(path, str(user.id)))
// ... rest of the code ...
|
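Because the `post_save` receiver above runs for every saved `User`, creating a user is enough to get an `Account` with the default balance. A minimal sketch, assuming it is run inside a configured Django project (for example from a `manage.py shell`):

```python
from django.contrib.auth.models import User

# Saving a new user fires post_save; user_saved() then creates the
# matching Account with the default balance of 5000.
user = User.objects.create_user('alice', password='example-password')
print(user.account.balance)  # 5000
```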
2b2696dde438a46a7b831867111cc767a88bf77e
|
lib/DjangoLibrary.py
|
lib/DjangoLibrary.py
|
from robot.api import logger
import os
import signal
import subprocess
ROBOT_LIBRARY_DOC_FORMAT = 'reST'
class DjangoLibrary:
"""A library for testing Django with Robot Framework.
"""
django_pid = None
selenium_pid = None
# TEST CASE => New instance is created for every test case.
# TEST SUITE => New instance is created for every test suite.
# GLOBAL => Only one instance is created during the whole test execution.
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
def __init__(self, host="127.0.0.1", port=8000):
self.host = host
self.port = port
def start_django(self):
args = [
'python',
'mysite/manage.py',
'runserver',
'%s:%s' % (self.host, self.port),
'--nothreading',
'--noreload',
]
self.django_pid = subprocess.Popen(args).pid
logger.console(
"Django started (PID: %s)" % self.django_pid,
)
def stop_django(self):
os.kill(self.django_pid, signal.SIGKILL)
logger.console(
"Django stopped (PID: %s)" % self.django_pid,
)
|
__version__ = '0.1'
from robot.api import logger
import os
import signal
import subprocess
ROBOT_LIBRARY_DOC_FORMAT = 'reST'
class DjangoLibrary:
"""A library for testing Django with Robot Framework.
"""
django_pid = None
selenium_pid = None
# TEST CASE => New instance is created for every test case.
# TEST SUITE => New instance is created for every test suite.
# GLOBAL => Only one instance is created during the whole test execution.
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
def __init__(self, host="127.0.0.1", port=8000):
self.host = host
self.port = port
def start_django(self):
"""Start the Django server."""
args = [
'python',
'mysite/manage.py',
'runserver',
'%s:%s' % (self.host, self.port),
'--nothreading',
'--noreload',
]
self.django_pid = subprocess.Popen(args).pid
logger.console(
"Django started (PID: %s)" % self.django_pid,
)
def stop_django(self):
"""Stop Django server."""
os.kill(self.django_pid, signal.SIGKILL)
logger.console(
"Django stopped (PID: %s)" % self.django_pid,
)
|
Add version, utf-8 and some comments.
|
Add version, utf-8 and some comments.
|
Python
|
apache-2.0
|
kitconcept/robotframework-djangolibrary
|
python
|
## Code Before:
from robot.api import logger
import os
import signal
import subprocess
ROBOT_LIBRARY_DOC_FORMAT = 'reST'
class DjangoLibrary:
"""A library for testing Django with Robot Framework.
"""
django_pid = None
selenium_pid = None
# TEST CASE => New instance is created for every test case.
# TEST SUITE => New instance is created for every test suite.
# GLOBAL => Only one instance is created during the whole test execution.
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
def __init__(self, host="127.0.0.1", port=8000):
self.host = host
self.port = port
def start_django(self):
args = [
'python',
'mysite/manage.py',
'runserver',
'%s:%s' % (self.host, self.port),
'--nothreading',
'--noreload',
]
self.django_pid = subprocess.Popen(args).pid
logger.console(
"Django started (PID: %s)" % self.django_pid,
)
def stop_django(self):
os.kill(self.django_pid, signal.SIGKILL)
logger.console(
"Django stopped (PID: %s)" % self.django_pid,
)
## Instruction:
Add version, utf-8 and some comments.
## Code After:
__version__ = '0.1'
from robot.api import logger
import os
import signal
import subprocess
ROBOT_LIBRARY_DOC_FORMAT = 'reST'
class DjangoLibrary:
"""A library for testing Django with Robot Framework.
"""
django_pid = None
selenium_pid = None
# TEST CASE => New instance is created for every test case.
# TEST SUITE => New instance is created for every test suite.
# GLOBAL => Only one instance is created during the whole test execution.
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
def __init__(self, host="127.0.0.1", port=8000):
self.host = host
self.port = port
def start_django(self):
"""Start the Django server."""
args = [
'python',
'mysite/manage.py',
'runserver',
'%s:%s' % (self.host, self.port),
'--nothreading',
'--noreload',
]
self.django_pid = subprocess.Popen(args).pid
logger.console(
"Django started (PID: %s)" % self.django_pid,
)
def stop_django(self):
"""Stop Django server."""
os.kill(self.django_pid, signal.SIGKILL)
logger.console(
"Django stopped (PID: %s)" % self.django_pid,
)
|
// ... existing code ...
__version__ = '0.1'
from robot.api import logger
import os
// ... modified code ...
self.port = port
def start_django(self):
"""Start the Django server."""
args = [
'python',
'mysite/manage.py',
...
)
def stop_django(self):
"""Stop Django server."""
os.kill(self.django_pid, signal.SIGKILL)
logger.console(
"Django stopped (PID: %s)" % self.django_pid,
// ... rest of the code ...
|
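The library's keywords can also be driven directly from Python, which is a quick way to exercise the newly documented `start_django`/`stop_django` methods. A rough sketch, assuming `lib/` is on the import path and the `mysite/manage.py` layout referenced by the class exists:

```python
from DjangoLibrary import DjangoLibrary

lib = DjangoLibrary(host='127.0.0.1', port=8000)
lib.start_django()   # spawns `python mysite/manage.py runserver 127.0.0.1:8000`
# ... run tests against the live server here ...
lib.stop_django()    # kills the recorded PID
```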
be8344c2f796ecab60669630f4729c4ffa41c83b
|
web/impact/impact/v1/views/utils.py
|
web/impact/impact/v1/views/utils.py
|
def merge_data_by_id(data):
result = {}
for datum in data:
id = datum["id"]
item = result.get(id, {})
item.update(datum)
result[id] = item
return result.values()
def map_data(klass, query, order, data_keys, output_keys):
result = klass.objects.filter(query).order_by(order)
data = result.values_list(*data_keys)
return [dict(zip(output_keys, values))
for values in data]
|
def coalesce_dictionaries(data, merge_field="id"):
"Takes a sequence of dictionaries, merges those that share the
same merge_field, and returns a list of resulting dictionaries"
result = {}
for datum in data:
merge_id = datum[merge_field]
item = result.get(merge_id, {})
item.update(datum)
result[merge_id] = item
return result.values()
def map_data(klass, query, order, data_keys, output_keys):
result = klass.objects.filter(query).order_by(order)
data = result.values_list(*data_keys)
return [dict(zip(output_keys, values))
for values in data]
|
Rename merge_data_by_id, add doc-string, get rid of id as a local
|
[AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local
|
Python
|
mit
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
python
|
## Code Before:
def merge_data_by_id(data):
result = {}
for datum in data:
id = datum["id"]
item = result.get(id, {})
item.update(datum)
result[id] = item
return result.values()
def map_data(klass, query, order, data_keys, output_keys):
result = klass.objects.filter(query).order_by(order)
data = result.values_list(*data_keys)
return [dict(zip(output_keys, values))
for values in data]
## Instruction:
[AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local
## Code After:
def coalesce_dictionaries(data, merge_field="id"):
"Takes a sequence of dictionaries, merges those that share the
same merge_field, and returns a list of resulting dictionaries"
result = {}
for datum in data:
merge_id = datum[merge_field]
item = result.get(merge_id, {})
item.update(datum)
result[merge_id] = item
return result.values()
def map_data(klass, query, order, data_keys, output_keys):
result = klass.objects.filter(query).order_by(order)
data = result.values_list(*data_keys)
return [dict(zip(output_keys, values))
for values in data]
|
# ... existing code ...
def coalesce_dictionaries(data, merge_field="id"):
"Takes a sequence of dictionaries, merges those that share the
same merge_field, and returns a list of resulting dictionaries"
result = {}
for datum in data:
merge_id = datum[merge_field]
item = result.get(merge_id, {})
item.update(datum)
result[merge_id] = item
return result.values()
# ... rest of the code ...
|
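A quick, made-up call shows how rows that share the merge field collapse into a single dictionary; the input values are invented, and the order of the merged rows is not guaranteed:

```python
rows = [
    {'id': 1, 'first_name': 'Ada'},
    {'id': 1, 'last_name': 'Lovelace'},
    {'id': 2, 'first_name': 'Alan'},
]
merged = list(coalesce_dictionaries(rows))
# e.g. [{'id': 1, 'first_name': 'Ada', 'last_name': 'Lovelace'},
#       {'id': 2, 'first_name': 'Alan'}]
```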
7d50ca9b29a71a9cda2a5b78a0cb392108b217d5
|
roche/scripts/xml-server-load.py
|
roche/scripts/xml-server-load.py
|
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:[email protected]:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
Add comment for full url with non guest user
|
Add comment for full url with non guest user
|
Python
|
mit
|
beijingren/roche-website,beijingren/roche-website,beijingren/roche-website,beijingren/roche-website
|
python
|
## Code Before:
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
## Instruction:
Add comment for full url with non guest user
## Code After:
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:[email protected]:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
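The comment added above points at eXist's URL-embedded credentials; a hedged sketch of connecting as a named user is below, with the placeholder username and password taken straight from that comment rather than from any real account.
# Hypothetical: authenticate as a non-guest user via the URL form shown in the comment.
xmldb = ExistDB('http://username:[email protected]:8080/exist')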
|
...
#
# Timeout higher?
#
#
# http://username:[email protected]:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
...
|
cc4bf1dc94a819a9ac185634af5603c0da33a3ef
|
annotation-rest/src/main/java/uk/ac/ebi/quickgo/annotation/AnnotationREST.java
|
annotation-rest/src/main/java/uk/ac/ebi/quickgo/annotation/AnnotationREST.java
|
package uk.ac.ebi.quickgo.annotation;
import uk.ac.ebi.quickgo.annotation.service.search.SearchServiceConfig;
import uk.ac.ebi.quickgo.rest.controller.CORSFilter;
import uk.ac.ebi.quickgo.rest.controller.SwaggerConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.solr.SolrRepositoriesAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
/**
*
* The RESTful service configuration for Annotations
*
* @author Tony Wardell
* Date: 26/04/2016
* Time: 14:41
* Created with IntelliJ IDEA.
*/
@SpringBootApplication(exclude = {SolrRepositoriesAutoConfiguration.class})
@ComponentScan({"uk.ac.ebi.quickgo.annotation.controller", "uk.ac.ebi.quickgo.rest"})
@Import({SearchServiceConfig.class, SwaggerConfig.class, CORSFilter.class})
public class AnnotationREST {
/**
* Ensures that placeholders are replaced with property values
*/
@Bean
static PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
public static void main(String[] args) {
SpringApplication.run(AnnotationREST.class, args);
}
}
|
package uk.ac.ebi.quickgo.annotation;
import uk.ac.ebi.quickgo.annotation.service.search.SearchServiceConfig;
import uk.ac.ebi.quickgo.rest.controller.CORSFilter;
import uk.ac.ebi.quickgo.rest.controller.SwaggerConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.solr.SolrRepositoriesAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
/**
*
* The RESTful service configuration for Annotations
*
* @author Tony Wardell
* Date: 26/04/2016
* Time: 14:41
* Created with IntelliJ IDEA.
*/
@SpringBootApplication(exclude = {SolrRepositoriesAutoConfiguration.class})
@ComponentScan({"uk.ac.ebi.quickgo.annotation.controller",
"uk.ac.ebi.quickgo.rest",
"uk.ac.ebi.quickgo.annotation.service.statistics"})
@Import({SearchServiceConfig.class, SwaggerConfig.class, CORSFilter.class})
public class AnnotationREST {
/**
* Ensures that placeholders are replaced with property values
*/
@Bean
static PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
public static void main(String[] args) {
SpringApplication.run(AnnotationREST.class, args);
}
}
|
Add statistics package to component scan.
|
Add statistics package to component scan.
|
Java
|
apache-2.0
|
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
|
java
|
## Code Before:
package uk.ac.ebi.quickgo.annotation;
import uk.ac.ebi.quickgo.annotation.service.search.SearchServiceConfig;
import uk.ac.ebi.quickgo.rest.controller.CORSFilter;
import uk.ac.ebi.quickgo.rest.controller.SwaggerConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.solr.SolrRepositoriesAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
/**
*
* The RESTful service configuration for Annotations
*
* @author Tony Wardell
* Date: 26/04/2016
* Time: 14:41
* Created with IntelliJ IDEA.
*/
@SpringBootApplication(exclude = {SolrRepositoriesAutoConfiguration.class})
@ComponentScan({"uk.ac.ebi.quickgo.annotation.controller", "uk.ac.ebi.quickgo.rest"})
@Import({SearchServiceConfig.class, SwaggerConfig.class, CORSFilter.class})
public class AnnotationREST {
/**
* Ensures that placeholders are replaced with property values
*/
@Bean
static PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
public static void main(String[] args) {
SpringApplication.run(AnnotationREST.class, args);
}
}
## Instruction:
Add statistics package to component scan.
## Code After:
package uk.ac.ebi.quickgo.annotation;
import uk.ac.ebi.quickgo.annotation.service.search.SearchServiceConfig;
import uk.ac.ebi.quickgo.rest.controller.CORSFilter;
import uk.ac.ebi.quickgo.rest.controller.SwaggerConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.solr.SolrRepositoriesAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
/**
*
* The RESTful service configuration for Annotations
*
* @author Tony Wardell
* Date: 26/04/2016
* Time: 14:41
* Created with IntelliJ IDEA.
*/
@SpringBootApplication(exclude = {SolrRepositoriesAutoConfiguration.class})
@ComponentScan({"uk.ac.ebi.quickgo.annotation.controller",
"uk.ac.ebi.quickgo.rest",
"uk.ac.ebi.quickgo.annotation.service.statistics"})
@Import({SearchServiceConfig.class, SwaggerConfig.class, CORSFilter.class})
public class AnnotationREST {
/**
* Ensures that placeholders are replaced with property values
*/
@Bean
static PropertySourcesPlaceholderConfigurer propertyPlaceHolderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
public static void main(String[] args) {
SpringApplication.run(AnnotationREST.class, args);
}
}
|
...
* Created with IntelliJ IDEA.
*/
@SpringBootApplication(exclude = {SolrRepositoriesAutoConfiguration.class})
@ComponentScan({"uk.ac.ebi.quickgo.annotation.controller",
"uk.ac.ebi.quickgo.rest",
"uk.ac.ebi.quickgo.annotation.service.statistics"})
@Import({SearchServiceConfig.class, SwaggerConfig.class, CORSFilter.class})
public class AnnotationREST {
...
|
5cf0b19d67a667d4e0d48a12f0ee94f3387cfa37
|
tests/test_helpers.py
|
tests/test_helpers.py
|
import testtools
from talons import helpers
from tests import base
class TestHelpers(base.TestCase):
def test_bad_import(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('not.exist.function')
def test_no_function_in_module(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('sys.noexisting')
def test_not_callable(self):
with testtools.ExpectedException(TypeError):
helpers.import_function('sys.stdout')
|
import testtools
from talons import helpers
from tests import base
class TestHelpers(base.TestCase):
def test_bad_import(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('not.exist.function')
def test_no_function_in_module(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('sys.noexisting')
def test_not_callable(self):
with testtools.ExpectedException(TypeError):
helpers.import_function('sys.stdout')
def test_return_function(self):
fn = helpers.import_function('os.path.join')
self.assertEqual(callable(fn), True)
|
Add test to ensure talons.helpers.import_function returns a callable
|
Add test to ensure talons.helpers.import_function returns a callable
|
Python
|
apache-2.0
|
talons/talons,jaypipes/talons
|
python
|
## Code Before:
import testtools
from talons import helpers
from tests import base
class TestHelpers(base.TestCase):
def test_bad_import(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('not.exist.function')
def test_no_function_in_module(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('sys.noexisting')
def test_not_callable(self):
with testtools.ExpectedException(TypeError):
helpers.import_function('sys.stdout')
## Instruction:
Add test to ensure talons.helpers.import_function returns a callable
## Code After:
import testtools
from talons import helpers
from tests import base
class TestHelpers(base.TestCase):
def test_bad_import(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('not.exist.function')
def test_no_function_in_module(self):
with testtools.ExpectedException(ImportError):
helpers.import_function('sys.noexisting')
def test_not_callable(self):
with testtools.ExpectedException(TypeError):
helpers.import_function('sys.stdout')
def test_return_function(self):
fn = helpers.import_function('os.path.join')
self.assertEqual(callable(fn), True)
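A small usage sketch of the helper the new test exercises; the interpreter-style lines are illustrative and not part of the commit.
# Hypothetical: the dotted path resolves to the real callable.
join = helpers.import_function('os.path.join')
print(join('tmp', 'cache'))   # tmp/cache on POSIX systems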
|
...
def test_not_callable(self):
with testtools.ExpectedException(TypeError):
helpers.import_function('sys.stdout')
def test_return_function(self):
fn = helpers.import_function('os.path.join')
self.assertEqual(callable(fn), True)
...
|
855724c4e52a55d141e2ef72cf7181710fb33d44
|
dwitter/user/urls.py
|
dwitter/user/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>[a-z0-9]*)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>[a-z0-9]*)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>\w+)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>\w+)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
|
Fix error when usernames have capital characters
|
Fix error when usernames have capital characters
|
Python
|
apache-2.0
|
lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter
|
python
|
## Code Before:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>[a-z0-9]*)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>[a-z0-9]*)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
## Instruction:
Fix error when usernames have capital characters
## Code After:
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>\w+)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>\w+)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
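A quick standalone check of why the old pattern failed for capitalised usernames; this re sketch is illustrative and not part of the commit.
# Hypothetical demonstration with the standard re module.
import re
old = re.compile(r'^(?P<url_username>[a-z0-9]*)$')
new = re.compile(r'^(?P<url_username>\w+)$')
print(bool(old.match('JohnDoe')))   # False, uppercase letters are rejected
print(bool(new.match('JohnDoe')))   # True, \w covers letters, digits and _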
|
// ... existing code ...
from . import views
urlpatterns = [
url(r'^(?P<url_username>\w+)$', views.user_feed, {'page_nr':'1'}, name='user_feed'),
url(r'^(?P<url_username>\w+)/(?P<page_nr>\d+)$', views.user_feed, name='user_feed_page'),
]
// ... rest of the code ...
|
1afc43dc46ef38e5581898fd8f99a00cb81bbe5f
|
butterknife-annotations/src/main/java/butterknife/BindArray.java
|
butterknife-annotations/src/main/java/butterknife/BindArray.java
|
package butterknife;
import android.support.annotation.ArrayRes;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.CLASS;
/**
* Bind a field to the specified array resource ID. The type of array will be inferred from the
* annotated element.
*
* String array:
* <pre><code>
* {@literal @}BindArray(R.array.countries) String[] countries;
* </code></pre>
*
* Int array:
* <pre><code>
* {@literal @}BindArray(R.array.phones) int[] phones;
* </code></pre>
*
* Text array:
* <pre><code>
* {@literal @}BindArray(R.array.options) CharSequence[] options;
* </code></pre>
*
* {@link android.content.res.TypedArray}:
* <pre><code>
* {@literal @}BindArray(R.array.icons) TypedArray icons;
* </code></pre>
*/
@Retention(CLASS) @Target(FIELD)
public @interface BindArray {
/** Array resource ID to which the field will be bound. */
@ArrayRes int value();
}
|
package butterknife;
import android.support.annotation.ArrayRes;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Bind a field to the specified array resource ID. The type of array will be inferred from the
* annotated element.
*
* String array:
* <pre><code>
* {@literal @}BindArray(R.array.countries) String[] countries;
* </code></pre>
*
* Int array:
* <pre><code>
* {@literal @}BindArray(R.array.phones) int[] phones;
* </code></pre>
*
* Text array:
* <pre><code>
* {@literal @}BindArray(R.array.options) CharSequence[] options;
* </code></pre>
*
* {@link android.content.res.TypedArray}:
* <pre><code>
* {@literal @}BindArray(R.array.icons) TypedArray icons;
* </code></pre>
*/
@Retention(RUNTIME) @Target(FIELD)
public @interface BindArray {
/** Array resource ID to which the field will be bound. */
@ArrayRes int value();
}
|
Fix annotation retention for reflect usage
|
Fix annotation retention for reflect usage
|
Java
|
apache-2.0
|
hzsweers/butterknife,ze-pequeno/butterknife,JakeWharton/butterknife,hzsweers/butterknife,JakeWharton/butterknife,ze-pequeno/butterknife,JakeWharton/butterknife,ze-pequeno/butterknife,ze-pequeno/butterknife,hzsweers/butterknife,JakeWharton/butterknife
|
java
|
## Code Before:
package butterknife;
import android.support.annotation.ArrayRes;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.CLASS;
/**
* Bind a field to the specified array resource ID. The type of array will be inferred from the
* annotated element.
*
* String array:
* <pre><code>
* {@literal @}BindArray(R.array.countries) String[] countries;
* </code></pre>
*
* Int array:
* <pre><code>
* {@literal @}BindArray(R.array.phones) int[] phones;
* </code></pre>
*
* Text array:
* <pre><code>
* {@literal @}BindArray(R.array.options) CharSequence[] options;
* </code></pre>
*
* {@link android.content.res.TypedArray}:
* <pre><code>
* {@literal @}BindArray(R.array.icons) TypedArray icons;
* </code></pre>
*/
@Retention(CLASS) @Target(FIELD)
public @interface BindArray {
/** Array resource ID to which the field will be bound. */
@ArrayRes int value();
}
## Instruction:
Fix annotation retention for reflect usage
## Code After:
package butterknife;
import android.support.annotation.ArrayRes;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Bind a field to the specified array resource ID. The type of array will be inferred from the
* annotated element.
*
* String array:
* <pre><code>
* {@literal @}BindArray(R.array.countries) String[] countries;
* </code></pre>
*
* Int array:
* <pre><code>
* {@literal @}BindArray(R.array.phones) int[] phones;
* </code></pre>
*
* Text array:
* <pre><code>
* {@literal @}BindArray(R.array.options) CharSequence[] options;
* </code></pre>
*
* {@link android.content.res.TypedArray}:
* <pre><code>
* {@literal @}BindArray(R.array.icons) TypedArray icons;
* </code></pre>
*/
@Retention(RUNTIME) @Target(FIELD)
public @interface BindArray {
/** Array resource ID to which the field will be bound. */
@ArrayRes int value();
}
|
// ... existing code ...
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Bind a field to the specified array resource ID. The type of array will be inferred from the
// ... modified code ...
* {@literal @}BindArray(R.array.icons) TypedArray icons;
* </code></pre>
*/
@Retention(RUNTIME) @Target(FIELD)
public @interface BindArray {
/** Array resource ID to which the field will be bound. */
@ArrayRes int value();
// ... rest of the code ...
|
f5592efd0cf780c6e97483a16820f98478be8e3d
|
devil/devil/android/sdk/version_codes.py
|
devil/devil/android/sdk/version_codes.py
|
JELLY_BEAN = 16
JELLY_BEAN_MR1 = 17
JELLY_BEAN_MR2 = 18
KITKAT = 19
KITKAT_WATCH = 20
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
|
JELLY_BEAN = 16
JELLY_BEAN_MR1 = 17
JELLY_BEAN_MR2 = 18
KITKAT = 19
KITKAT_WATCH = 20
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
NOUGAT = 24
|
Add NOUGAT version code constant.
|
Add NOUGAT version code constant.
Review-Url: https://codereview.chromium.org/2386453002
|
Python
|
bsd-3-clause
|
sahiljain/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult
|
python
|
## Code Before:
JELLY_BEAN = 16
JELLY_BEAN_MR1 = 17
JELLY_BEAN_MR2 = 18
KITKAT = 19
KITKAT_WATCH = 20
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
## Instruction:
Add NOUGAT version code constant.
Review-Url: https://codereview.chromium.org/2386453002
## Code After:
JELLY_BEAN = 16
JELLY_BEAN_MR1 = 17
JELLY_BEAN_MR2 = 18
KITKAT = 19
KITKAT_WATCH = 20
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
NOUGAT = 24
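A hedged sketch of how such SDK constants are commonly compared in callers; the device_sdk value below is a hypothetical stand-in, not something read from a device here.
# Hypothetical guard using the new constant.
from devil.android.sdk import version_codes
device_sdk = 25  # stand-in for the value reported by the device
if device_sdk >= version_codes.NOUGAT:
    print('Nougat or newer')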
|
...
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
NOUGAT = 24
...
|
c6f594d36cb44a77f6ca88ce9b1282cc8f70f480
|
nabl2.terms/src/main/java/mb/nabl2/util/CapsuleUtil.java
|
nabl2.terms/src/main/java/mb/nabl2/util/CapsuleUtil.java
|
package mb.nabl2.util;
import java.util.Map.Entry;
import org.metaborg.util.functions.Function2;
import io.usethesource.capsule.Map;
public final class CapsuleUtil {
private CapsuleUtil() {
}
public static <K, V> void replace(Map.Transient<K, V> map, Function2<K, V, V> mapper) {
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
map.__put(key, val);
} else {
map.__remove(key);
}
}
}
public static <K, V> Map.Immutable<K, V> replace(Map.Immutable<K, V> map, Function2<K, V, V> mapper) {
final Map.Transient<K, V> newMap = Map.Transient.of();
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
newMap.__put(key, val);
}
}
return newMap.freeze();
}
}
|
package mb.nabl2.util;
import java.util.Map.Entry;
import org.metaborg.util.functions.Function2;
import io.usethesource.capsule.Map;
import io.usethesource.capsule.Set;
public final class CapsuleUtil {
private CapsuleUtil() {
}
public static <K, V> void replace(Map.Transient<K, V> map, Function2<K, V, V> mapper) {
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
map.__put(key, val);
} else {
map.__remove(key);
}
}
}
public static <K, V> Map.Immutable<K, V> replace(Map.Immutable<K, V> map, Function2<K, V, V> mapper) {
final Map.Transient<K, V> newMap = Map.Transient.of();
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
newMap.__put(key, val);
}
}
return newMap.freeze();
}
@SuppressWarnings("unchecked") public static <V> Set.Immutable<V> toSet(Iterable<? extends V> values) {
if(values instanceof Set.Immutable) {
return (Set.Immutable<V>) values;
}
Set.Transient<V> set = Set.Transient.of();
for(V value : values) {
set.__insert(value);
}
return set.freeze();
}
}
|
Add method to construct Capsule set from Iterable.
|
Add method to construct Capsule set from Iterable.
|
Java
|
apache-2.0
|
metaborg/nabl,metaborg/nabl,metaborg/nabl
|
java
|
## Code Before:
package mb.nabl2.util;
import java.util.Map.Entry;
import org.metaborg.util.functions.Function2;
import io.usethesource.capsule.Map;
public final class CapsuleUtil {
private CapsuleUtil() {
}
public static <K, V> void replace(Map.Transient<K, V> map, Function2<K, V, V> mapper) {
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
map.__put(key, val);
} else {
map.__remove(key);
}
}
}
public static <K, V> Map.Immutable<K, V> replace(Map.Immutable<K, V> map, Function2<K, V, V> mapper) {
final Map.Transient<K, V> newMap = Map.Transient.of();
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
newMap.__put(key, val);
}
}
return newMap.freeze();
}
}
## Instruction:
Add method to construct Capsule set from Iterable.
## Code After:
package mb.nabl2.util;
import java.util.Map.Entry;
import org.metaborg.util.functions.Function2;
import io.usethesource.capsule.Map;
import io.usethesource.capsule.Set;
public final class CapsuleUtil {
private CapsuleUtil() {
}
public static <K, V> void replace(Map.Transient<K, V> map, Function2<K, V, V> mapper) {
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
map.__put(key, val);
} else {
map.__remove(key);
}
}
}
public static <K, V> Map.Immutable<K, V> replace(Map.Immutable<K, V> map, Function2<K, V, V> mapper) {
final Map.Transient<K, V> newMap = Map.Transient.of();
for(Entry<K, V> entry : map.entrySet()) {
final K key = entry.getKey();
final V val = mapper.apply(key, entry.getValue());
if(val != null) {
newMap.__put(key, val);
}
}
return newMap.freeze();
}
@SuppressWarnings("unchecked") public static <V> Set.Immutable<V> toSet(Iterable<? extends V> values) {
if(values instanceof Set.Immutable) {
return (Set.Immutable<V>) values;
}
Set.Transient<V> set = Set.Transient.of();
for(V value : values) {
set.__insert(value);
}
return set.freeze();
}
}
|
...
import org.metaborg.util.functions.Function2;
import io.usethesource.capsule.Map;
import io.usethesource.capsule.Set;
public final class CapsuleUtil {
...
return newMap.freeze();
}
@SuppressWarnings("unchecked") public static <V> Set.Immutable<V> toSet(Iterable<? extends V> values) {
if(values instanceof Set.Immutable) {
return (Set.Immutable<V>) values;
}
Set.Transient<V> set = Set.Transient.of();
for(V value : values) {
set.__insert(value);
}
return set.freeze();
}
}
...
|
d3163d8a7695da9687f82d9d40c6767322998fc2
|
python/ql/test/experimental/dataflow/tainttracking/defaultAdditionalTaintStep-py3/test_collections.py
|
python/ql/test/experimental/dataflow/tainttracking/defaultAdditionalTaintStep-py3/test_collections.py
|
import sys; import os; sys.path.append(os.path.dirname(os.path.dirname((__file__))))
from taintlib import *
# This has no runtime impact, but allows autocomplete to work
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..taintlib import *
# Actual tests
def test_access():
tainted_list = TAINTED_LIST
ensure_tainted(
tainted_list.copy(), # $ tainted
)
def list_clear():
tainted_string = TAINTED_STRING
tainted_list = [tainted_string]
ensure_tainted(tainted_list) # $ tainted
tainted_list.clear()
ensure_not_tainted(tainted_list) # $ SPURIOUS: tainted
# Make tests runable
test_access()
list_clear()
|
import sys; import os; sys.path.append(os.path.dirname(os.path.dirname((__file__))))
from taintlib import *
# This has no runtime impact, but allows autocomplete to work
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..taintlib import *
# Actual tests
def test_access():
tainted_list = TAINTED_LIST
ensure_tainted(
tainted_list.copy(), # $ tainted
)
for ((x, y, *z), a, b) in tainted_list:
ensure_tainted(
x, # $ tainted
y, # $ tainted
z, # $ tainted
a, # $ tainted
b, # $ tainted
)
def list_clear():
tainted_string = TAINTED_STRING
tainted_list = [tainted_string]
ensure_tainted(tainted_list) # $ tainted
tainted_list.clear()
ensure_not_tainted(tainted_list) # $ SPURIOUS: tainted
# Make tests runable
test_access()
list_clear()
|
Add iterable-unpacking in for test
|
Python: Add iterable-unpacking in for test
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
python
|
## Code Before:
import sys; import os; sys.path.append(os.path.dirname(os.path.dirname((__file__))))
from taintlib import *
# This has no runtime impact, but allows autocomplete to work
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..taintlib import *
# Actual tests
def test_access():
tainted_list = TAINTED_LIST
ensure_tainted(
tainted_list.copy(), # $ tainted
)
def list_clear():
tainted_string = TAINTED_STRING
tainted_list = [tainted_string]
ensure_tainted(tainted_list) # $ tainted
tainted_list.clear()
ensure_not_tainted(tainted_list) # $ SPURIOUS: tainted
# Make tests runable
test_access()
list_clear()
## Instruction:
Python: Add iterable-unpacking in for test
## Code After:
import sys; import os; sys.path.append(os.path.dirname(os.path.dirname((__file__))))
from taintlib import *
# This has no runtime impact, but allows autocomplete to work
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..taintlib import *
# Actual tests
def test_access():
tainted_list = TAINTED_LIST
ensure_tainted(
tainted_list.copy(), # $ tainted
)
for ((x, y, *z), a, b) in tainted_list:
ensure_tainted(
x, # $ tainted
y, # $ tainted
z, # $ tainted
a, # $ tainted
b, # $ tainted
)
def list_clear():
tainted_string = TAINTED_STRING
tainted_list = [tainted_string]
ensure_tainted(tainted_list) # $ tainted
tainted_list.clear()
ensure_not_tainted(tainted_list) # $ SPURIOUS: tainted
# Make tests runable
test_access()
list_clear()
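A standalone sketch of the nested-unpacking shape the new loop exercises; the sample tuple is hypothetical.
# Hypothetical: one element shaped like the items the test iterates over.
row = (("u", "v", "w1", "w2"), "a", "b")
((x, y, *z), a, b) = row
print(x, y, z, a, b)   # u v ['w1', 'w2'] a b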
|
...
tainted_list.copy(), # $ tainted
)
for ((x, y, *z), a, b) in tainted_list:
ensure_tainted(
x, # $ tainted
y, # $ tainted
z, # $ tainted
a, # $ tainted
b, # $ tainted
)
def list_clear():
tainted_string = TAINTED_STRING
...
|
49ce9aa1bdd3479c31b8aa2e606b1768a444aea2
|
irrigator_pro/farms/templatetags/today_filters.py
|
irrigator_pro/farms/templatetags/today_filters.py
|
from django import template
from datetime import date, datetime, timedelta
register = template.Library()
@register.filter(expects_localtime=True)
def is_today(value):
if isinstance(value, datetime):
value = value.date()
return value == date.today()
@register.filter(expects_localtime=True)
def is_past(value):
if isinstance(value, datetime):
value = value.date()
return value < date.today()
@register.filter(expects_localtime=True)
def is_future(value):
if isinstance(value, datetime):
value = value.date()
return value > date.today()
@register.filter(expects_localtime=True)
def compare_today(value):
if isinstance(value, datetime):
value = value.date()
return value - date.today()
|
from django import template
from datetime import date, datetime, timedelta
register = template.Library()
@register.filter(expects_localtime=True)
def is_today(value):
if isinstance(value, datetime):
value = value.date()
return value == date.today()
@register.filter(expects_localtime=True)
def is_past(value):
if isinstance(value, datetime):
value = value.date()
return value < date.today()
@register.filter(expects_localtime=True)
def is_future(value):
if isinstance(value, datetime):
value = value.date()
return value > date.today()
@register.filter(expects_localtime=True)
def compare_today(value):
if isinstance(value, datetime):
value = value.date()
return value - date.today()
@register.filter(expects_localtime=True)
def today_in_season(season):
start_date = season.season_start_date
end_date = season.season_end_date
return (start_date <= date.today() <= end_date)
|
Add new filter to determine if today is within the time period for a season.
|
Add new filter to determine if today is within the time period for a season.
|
Python
|
mit
|
warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro
|
python
|
## Code Before:
from django import template
from datetime import date, datetime, timedelta
register = template.Library()
@register.filter(expects_localtime=True)
def is_today(value):
if isinstance(value, datetime):
value = value.date()
return value == date.today()
@register.filter(expects_localtime=True)
def is_past(value):
if isinstance(value, datetime):
value = value.date()
return value < date.today()
@register.filter(expects_localtime=True)
def is_future(value):
if isinstance(value, datetime):
value = value.date()
return value > date.today()
@register.filter(expects_localtime=True)
def compare_today(value):
if isinstance(value, datetime):
value = value.date()
return value - date.today()
## Instruction:
Add new filter to determine if today is within the time period for a season.
## Code After:
from django import template
from datetime import date, datetime, timedelta
register = template.Library()
@register.filter(expects_localtime=True)
def is_today(value):
if isinstance(value, datetime):
value = value.date()
return value == date.today()
@register.filter(expects_localtime=True)
def is_past(value):
if isinstance(value, datetime):
value = value.date()
return value < date.today()
@register.filter(expects_localtime=True)
def is_future(value):
if isinstance(value, datetime):
value = value.date()
return value > date.today()
@register.filter(expects_localtime=True)
def compare_today(value):
if isinstance(value, datetime):
value = value.date()
return value - date.today()
@register.filter(expects_localtime=True)
def today_in_season(season):
start_date = season.season_start_date
end_date = season.season_end_date
return (start_date <= date.today() <= end_date)
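A minimal sketch of the new filter called directly; the stand-in season object below is hypothetical, whereas real templates would pass a Season model instance.
# Hypothetical stand-in object with the two attributes the filter reads.
from datetime import date
class FakeSeason:
    season_start_date = date(2024, 5, 1)
    season_end_date = date(2024, 9, 30)
print(today_in_season(FakeSeason()))   # True only while today falls inside that window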
|
...
if isinstance(value, datetime):
value = value.date()
return value - date.today()
@register.filter(expects_localtime=True)
def today_in_season(season):
start_date = season.season_start_date
end_date = season.season_end_date
return (start_date <= date.today() <= end_date)
...
|
89593cc22f8de4bdb6d605b2e4d6e04b0d1fcd61
|
microcosm_postgres/types.py
|
microcosm_postgres/types.py
|
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
Handle non-enum inputs (if they are enum names)
|
Handle non-enum inputs (if they are enum names)
|
Python
|
apache-2.0
|
globality-corp/microcosm-postgres,globality-corp/microcosm-postgres
|
python
|
## Code Before:
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
## Instruction:
Handle non-enum inputs (if they are enum names)
## Code After:
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
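A hedged sketch of what the widened bind handling now accepts, using a throwaway enum that is not part of the commit; it relies on the EnumType class defined above.
# Hypothetical: both an Enum member and its name serialise to the name.
from enum import Enum
class Colour(Enum):
    RED = 1
col_type = EnumType(Colour)
print(col_type.process_bind_param(Colour.RED, None))   # 'RED'
print(col_type.process_bind_param('RED', None))         # 'RED'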
|
# ... existing code ...
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
# ... modified code ...
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
# ... rest of the code ...
|
017182e317aa33c0bb4c13541ef19b11bb48e250
|
members/views.py
|
members/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
|
from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
Add view for searching users and return json format
|
Add view for searching users and return json format
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
python
|
## Code Before:
from django.shortcuts import render
from django.http import HttpResponse
from .models import User
def homepage(request):
return render(request, "index.html", {})
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return HttpResponse(json_data, mimetype='application/json')
## Instruction:
Add view for searching users and return json format
## Code After:
from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
def homepage(request):
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
User.objects.filter(username__icontains=name)
json_data = [dict(
id=member.id,
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
|
...
from django.shortcuts import render
from django.http import HttpResponse
from hackfmi.utils import json_view
from .models import User
...
return render(request, "index.html", {})
@json_view
def search(request, name):
members = User.objects.filter(first_name__icontains=name) or \
User.objects.filter(last_name__icontains=name) or \
...
full_name=' '.join([member.first_name, member.last_name]))
for member in members]
return json_data
...
|
b947c3564e694e55b344048bed7e3c1b3c900eab
|
src/main/java/com/datasift/client/pylon/PylonParametersData.java
|
src/main/java/com/datasift/client/pylon/PylonParametersData.java
|
package com.datasift.client.pylon;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
public class PylonParametersData {
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_EMPTY)
protected String interval;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected float span;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected int threshold;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected String target;
public PylonParametersData() { }
public PylonParametersData(String interval, Float span, Integer threshold, String target) {
this.interval = interval;
this.span = span;
this.threshold = threshold;
this.target = target;
}
public Float getSpan() {
return this.span;
}
public Integer getThreshold() {
return this.threshold;
}
public String getTarget() {
return this.target;
}
public void setInterval(String interval) {
this.interval = interval;
}
public void setSpan(Float span) {
this.span = span;
}
public void setThreshold(Integer threshold) {
this.threshold = threshold;
}
public void setTarget(String target) {
this.target = target;
}
}
|
package com.datasift.client.pylon;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
public class PylonParametersData {
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_EMPTY)
protected String interval;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected float span;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected int threshold;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected String target;
public PylonParametersData() { }
public PylonParametersData(String interval, Float span, Integer threshold, String target) {
this.interval = interval;
this.target = target;
if (span != null) this.span = span;
if (threshold != null) this.threshold = threshold;
}
public Float getSpan() {
return this.span;
}
public Integer getThreshold() {
return this.threshold;
}
public String getTarget() {
return this.target;
}
public void setInterval(String interval) {
this.interval = interval;
}
public void setSpan(Float span) {
this.span = span;
}
public void setThreshold(Integer threshold) {
this.threshold = threshold;
}
public void setTarget(String target) {
this.target = target;
}
}
|
Check for null values when setting primitives.
|
Check for null values when setting primitives.
|
Java
|
mit
|
datasift/datasift-java,datasift/datasift-java
|
java
|
## Code Before:
package com.datasift.client.pylon;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
public class PylonParametersData {
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_EMPTY)
protected String interval;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected float span;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected int threshold;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected String target;
public PylonParametersData() { }
public PylonParametersData(String interval, Float span, Integer threshold, String target) {
this.interval = interval;
this.span = span;
this.threshold = threshold;
this.target = target;
}
public Float getSpan() {
return this.span;
}
public Integer getThreshold() {
return this.threshold;
}
public String getTarget() {
return this.target;
}
public void setInterval(String interval) {
this.interval = interval;
}
public void setSpan(Float span) {
this.span = span;
}
public void setThreshold(Integer threshold) {
this.threshold = threshold;
}
public void setTarget(String target) {
this.target = target;
}
}
## Instruction:
Check for null values when setting primitives.
## Code After:
package com.datasift.client.pylon;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
public class PylonParametersData {
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_EMPTY)
protected String interval;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected float span;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected int threshold;
@JsonProperty
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
protected String target;
public PylonParametersData() { }
public PylonParametersData(String interval, Float span, Integer threshold, String target) {
this.interval = interval;
this.target = target;
if (span != null) this.span = span;
if (threshold != null) this.threshold = threshold;
}
public Float getSpan() {
return this.span;
}
public Integer getThreshold() {
return this.threshold;
}
public String getTarget() {
return this.target;
}
public void setInterval(String interval) {
this.interval = interval;
}
public void setSpan(Float span) {
this.span = span;
}
public void setThreshold(Integer threshold) {
this.threshold = threshold;
}
public void setTarget(String target) {
this.target = target;
}
}
|
# ... existing code ...
public PylonParametersData(String interval, Float span, Integer threshold, String target) {
this.interval = interval;
this.target = target;
if (span != null) this.span = span;
if (threshold != null) this.threshold = threshold;
}
public Float getSpan() {
# ... rest of the code ...
|
aa94c28835a67ca000226eb30bdbb0ef852383c5
|
jshbot/configurations.py
|
jshbot/configurations.py
|
import json
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='json'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
key -- Gets the specified key from the config file, otherwise everything.
extra -- Looks for <plugin_name>-<extra>.<extension>
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
filename = '{0}/config/{1}-{2}.{3}'.format(
bot.path, plugin_name, extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
except KeyError:
raise CBException(
"Plugin {} not found in the configurations dictionary.".format(plugin_name))
try:
if key:
return config[key]
else:
return config
except KeyError:
raise CBException("Key {} not found in the configuration file.".format(key))
try:
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
else:
return config_file.read()
except FileNotFoundError:
raise CBException("File {} not found.".format(filename))
except Exception as e:
raise CBException("Failed to read {} properly.".format(filename), e=e)
|
import json
import yaml
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='yaml'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
key -- Gets the specified key from the config file, otherwise everything.
extra -- Looks for <plugin_name>-<extra>.<extension>
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
filename = '{0}/config/{1}-{2}.{3}'.format(bot.path, plugin_name[:-3], extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
except KeyError:
raise CBException(
"Plugin {} not found in the configurations dictionary.".format(plugin_name))
try:
if key:
return config[key]
else:
return config
except KeyError:
raise CBException("Key {} not found in the configuration file.".format(key))
try:
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
elif extension.lower() == 'yaml':
return yaml.load(config_file)
else:
return config_file.read()
except FileNotFoundError:
raise CBException("File {} not found.".format(filename))
except Exception as e:
raise CBException("Failed to read {} properly.".format(filename), e=e)
|
Change default extension to yaml
|
Change default extension to yaml
|
Python
|
mit
|
jkchen2/JshBot,jkchen2/JshBot
|
python
|
## Code Before:
import json
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='json'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
key -- Gets the specified key from the config file, otherwise everything.
extra -- Looks for <plugin_name>-<extra>.<extension>
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
filename = '{0}/config/{1}-{2}.{3}'.format(
bot.path, plugin_name, extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
except KeyError:
raise CBException(
"Plugin {} not found in the configurations dictionary.".format(plugin_name))
try:
if key:
return config[key]
else:
return config
except KeyError:
raise CBException("Key {} not found in the configuration file.".format(key))
try:
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
else:
return config_file.read()
except FileNotFoundError:
raise CBException("File {} not found.".format(filename))
except Exception as e:
raise CBException("Failed to read {} properly.".format(filename), e=e)
## Instruction:
Change default extension to yaml
## Code After:
import json
import yaml
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='yaml'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
key -- Gets the specified key from the config file, otherwise everything.
extra -- Looks for <plugin_name>-<extra>.<extension>
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
filename = '{0}/config/{1}-{2}.{3}'.format(bot.path, plugin_name[:-3], extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
except KeyError:
raise CBException(
"Plugin {} not found in the configurations dictionary.".format(plugin_name))
try:
if key:
return config[key]
else:
return config
except KeyError:
raise CBException("Key {} not found in the configuration file.".format(key))
try:
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
elif extension.lower() == 'yaml':
return yaml.load(config_file)
else:
return config_file.read()
except FileNotFoundError:
raise CBException("File {} not found.".format(filename))
except Exception as e:
raise CBException("Failed to read {} properly.".format(filename), e=e)
|
...
import json
import yaml
from jshbot.exceptions import ConfiguredBotException, ErrorTypes
...
CBException = ConfiguredBotException('Configurations')
def get(bot, plugin_name, key=None, extra=None, extension='yaml'):
"""Gets the configuration file for the given plugin.
Keyword arguments:
...
extension -- If 'json', reads the file as json, otherwise reads it as text.
"""
if extra: # Open from external configuration file
filename = '{0}/config/{1}-{2}.{3}'.format(bot.path, plugin_name[:-3], extra, extension)
else: # Open from configuration dictionary
try:
config = bot.configurations[plugin_name]
...
with open(filename, 'r') as config_file:
if extension.lower() == 'json':
return json.load(config_file)
elif extension.lower() == 'yaml':
return yaml.load(config_file)
else:
return config_file.read()
except FileNotFoundError:
...
|
a8112a8ee3723d5ae097998efc7c43bd27cbee95
|
engineer/processors.py
|
engineer/processors.py
|
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = '[email protected]'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
|
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = '[email protected]'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
Handle LESS preprocessor errors more gracefully.
|
Handle LESS preprocessor errors more gracefully.
|
Python
|
mit
|
tylerbutler/engineer,tylerbutler/engineer,tylerbutler/engineer
|
python
|
## Code Before:
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = '[email protected]'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
## Instruction:
Handle LESS preprocessor errors more gracefully.
## Code After:
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = '[email protected]'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
...
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
...
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
...
|
ae4ad35270a5daaa58b4988820da9d342d8e4751
|
examples/basic_example.py
|
examples/basic_example.py
|
from chatterbot import ChatBot
# Create a new chat bot named Charlie
chatbot = ChatBot('Charlie')
# Get a response to the input text 'How are you?'
response = chatbot.get_response('How are you?')
print(response)
|
from chatterbot import ChatBot
# Create a new chat bot named Charlie
chatbot = ChatBot(
'Charlie',
trainer='chatterbot.trainers.ListTrainer'
)
chatbot.train([
"Hi, can I help you?",
"Sure, I'd to book a flight to Iceland.",
"Your flight has been booked."
])
# Get a response to the input text 'How are you?'
response = chatbot.get_response('I would like to book a flight.')
print(response)
|
Add training to basic example
|
Add training to basic example
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,gunthercox/ChatterBot
|
python
|
## Code Before:
from chatterbot import ChatBot
# Create a new chat bot named Charlie
chatbot = ChatBot('Charlie')
# Get a response to the input text 'How are you?'
response = chatbot.get_response('How are you?')
print(response)
## Instruction:
Add training to basic example
## Code After:
from chatterbot import ChatBot
# Create a new chat bot named Charlie
chatbot = ChatBot(
'Charlie',
trainer='chatterbot.trainers.ListTrainer'
)
chatbot.train([
"Hi, can I help you?",
"Sure, I'd to book a flight to Iceland.",
"Your flight has been booked."
])
# Get a response to the input text 'How are you?'
response = chatbot.get_response('I would like to book a flight.')
print(response)
|
// ... existing code ...
from chatterbot import ChatBot
# Create a new chat bot named Charlie
chatbot = ChatBot(
'Charlie',
trainer='chatterbot.trainers.ListTrainer'
)
chatbot.train([
"Hi, can I help you?",
"Sure, I'd to book a flight to Iceland.",
"Your flight has been booked."
])
# Get a response to the input text 'How are you?'
response = chatbot.get_response('I would like to book a flight.')
print(response)
// ... rest of the code ...
|
b1529393f0a4660def88e2e3df1d34446ebc48ff
|
Operator.h
|
Operator.h
|
/*===- Operator.h - libSimulation -=============================================
*
* DEMON
*
* This file is distributed under the BSD Open Source License. See LICENSE.TXT
* for details.
*
*===-----------------------------------------------------------------------===*/
#ifndef OPERATOR_H
#define OPERATOR_H
#include "Cloud.h"
class Operator {
public:
Cloud * const cloud;
Operator(Cloud * const myCloud) : cloud(myCloud) {}
virtual ~Operator() {}
virtual void operation1(const double currentTime)=0;
virtual void operation2(const double currentTime)=0;
virtual void operation3(const double currentTime)=0;
virtual void operation4(const double currentTime)=0;
};
#endif // OPERATOR_H
|
/*===- Operator.h - libSimulation -=============================================
*
* DEMON
*
* This file is distributed under the BSD Open Source License. See LICENSE.TXT
* for details.
*
*===-----------------------------------------------------------------------===*/
#ifndef OPERATOR_H
#define OPERATOR_H
#include "Cloud.h"
typedef unsigned int operator_index;
class Operator {
public:
Cloud * const cloud;
Operator(Cloud * const myCloud) : cloud(myCloud) {}
virtual ~Operator() {}
virtual void operation1(const double currentTime)=0;
virtual void operation2(const double currentTime)=0;
virtual void operation3(const double currentTime)=0;
virtual void operation4(const double currentTime)=0;
};
#endif // OPERATOR_H
|
Add a typedef for the number of operators. This will help the code distinguish counting operators from counting particles or forces.
|
Add a typedef for the number of operators. This will help the code distinguish counting operators from counting particles or forces.
|
C
|
bsd-3-clause
|
leios/demonsimulationcode,leios/demonsimulationcode
|
c
|
## Code Before:
/*===- Operator.h - libSimulation -=============================================
*
* DEMON
*
* This file is distributed under the BSD Open Source License. See LICENSE.TXT
* for details.
*
*===-----------------------------------------------------------------------===*/
#ifndef OPERATOR_H
#define OPERATOR_H
#include "Cloud.h"
class Operator {
public:
Cloud * const cloud;
Operator(Cloud * const myCloud) : cloud(myCloud) {}
virtual ~Operator() {}
virtual void operation1(const double currentTime)=0;
virtual void operation2(const double currentTime)=0;
virtual void operation3(const double currentTime)=0;
virtual void operation4(const double currentTime)=0;
};
#endif // OPERATOR_H
## Instruction:
Add a typedef for the number of operators. This will help the code distinguish counting operators from counting particles or forces.
## Code After:
/*===- Operator.h - libSimulation -=============================================
*
* DEMON
*
* This file is distributed under the BSD Open Source License. See LICENSE.TXT
* for details.
*
*===-----------------------------------------------------------------------===*/
#ifndef OPERATOR_H
#define OPERATOR_H
#include "Cloud.h"
typedef unsigned int operator_index;
class Operator {
public:
Cloud * const cloud;
Operator(Cloud * const myCloud) : cloud(myCloud) {}
virtual ~Operator() {}
virtual void operation1(const double currentTime)=0;
virtual void operation2(const double currentTime)=0;
virtual void operation3(const double currentTime)=0;
virtual void operation4(const double currentTime)=0;
};
#endif // OPERATOR_H
|
// ... existing code ...
#include "Cloud.h"
typedef unsigned int operator_index;
class Operator {
public:
Cloud * const cloud;
// ... rest of the code ...
|
a05a899eaf1f3e90ba9480e770561fa215ccfdb6
|
src/main/java/im/darkgeek/dicts/DictItem.java
|
src/main/java/im/darkgeek/dicts/DictItem.java
|
package im.darkgeek.dicts;
/**
* Created by justin on 15-8-28.
*/
public class DictItem {
private String word;
private String explanation;
public String getWord() {
return word;
}
public void setWord(String word) {
this.word = handleUnsafeChars(word);
}
public String getExplanation() {
return explanation;
}
public void setExplanation(String explanation) {
this.explanation = handleUnsafeChars(explanation);
}
private String handleUnsafeChars(String raw) {
return
raw.replace("\n", "")
.replace("\r", "");
}
@Override
public String toString() {
return "DictItem{" +
"word='" + word + '\'' +
", explanation='" + explanation + '\'' +
'}';
}
}
|
package im.darkgeek.dicts;
/**
* Created by justin on 15-8-28.
*/
public class DictItem {
private String word;
private String explanation;
public String getWord() {
return word;
}
public void setWord(String word) {
this.word = handleUnsafeChars(word);
}
public String getExplanation() {
return explanation;
}
public void setExplanation(String explanation) {
this.explanation = handleUnsafeChars(explanation);
}
private String handleUnsafeChars(String raw) {
return
raw.replace("\n", "")
.replace("\r", "");
}
@Override
public String toString() {
return "DictItem{" +
"word='" + word + '\'' +
", explanation='" + explanation + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DictItem item = (DictItem) o;
if (word != null ? !word.equals(item.word) : item.word != null) return false;
return !(explanation != null ? !explanation.equals(item.explanation) : item.explanation != null);
}
@Override
public int hashCode() {
return 0;
}
}
|
Add equals method for comparison purposes.
|
Add equals method for comparison purposes.
|
Java
|
bsd-2-clause
|
darkgeek/gcide-converter
|
java
|
## Code Before:
package im.darkgeek.dicts;
/**
* Created by justin on 15-8-28.
*/
public class DictItem {
private String word;
private String explanation;
public String getWord() {
return word;
}
public void setWord(String word) {
this.word = handleUnsafeChars(word);
}
public String getExplanation() {
return explanation;
}
public void setExplanation(String explanation) {
this.explanation = handleUnsafeChars(explanation);
}
private String handleUnsafeChars(String raw) {
return
raw.replace("\n", "")
.replace("\r", "");
}
@Override
public String toString() {
return "DictItem{" +
"word='" + word + '\'' +
", explanation='" + explanation + '\'' +
'}';
}
}
## Instruction:
Add equals method for comparison purposes.
## Code After:
package im.darkgeek.dicts;
/**
* Created by justin on 15-8-28.
*/
public class DictItem {
private String word;
private String explanation;
public String getWord() {
return word;
}
public void setWord(String word) {
this.word = handleUnsafeChars(word);
}
public String getExplanation() {
return explanation;
}
public void setExplanation(String explanation) {
this.explanation = handleUnsafeChars(explanation);
}
private String handleUnsafeChars(String raw) {
return
raw.replace("\n", "")
.replace("\r", "");
}
@Override
public String toString() {
return "DictItem{" +
"word='" + word + '\'' +
", explanation='" + explanation + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DictItem item = (DictItem) o;
if (word != null ? !word.equals(item.word) : item.word != null) return false;
return !(explanation != null ? !explanation.equals(item.explanation) : item.explanation != null);
}
@Override
public int hashCode() {
return 0;
}
}
|
...
", explanation='" + explanation + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DictItem item = (DictItem) o;
if (word != null ? !word.equals(item.word) : item.word != null) return false;
return !(explanation != null ? !explanation.equals(item.explanation) : item.explanation != null);
}
@Override
public int hashCode() {
return 0;
}
}
...
|
366316b0ea20ae178670581b61c52c481682d2b0
|
cosmic_ray/operators/exception_replacer.py
|
cosmic_ray/operators/exception_replacer.py
|
import ast
import builtins
from .operator import Operator
class OutOfNoWhereException(Exception):
pass
setattr(builtins, OutOfNoWhereException.__name__, OutOfNoWhereException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = OutOfNoWhereException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
import ast
import builtins
from .operator import Operator
class CosmicRayTestingException(Exception):
pass
setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = CosmicRayTestingException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
Change exception name to CosmicRayTestingException
|
Change exception name to CosmicRayTestingException
|
Python
|
mit
|
sixty-north/cosmic-ray
|
python
|
## Code Before:
import ast
import builtins
from .operator import Operator
class OutOfNoWhereException(Exception):
pass
setattr(builtins, OutOfNoWhereException.__name__, OutOfNoWhereException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = OutOfNoWhereException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
## Instruction:
Change exception name to CosmicRayTestingException
## Code After:
import ast
import builtins
from .operator import Operator
class CosmicRayTestingException(Exception):
pass
setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
"""An operator that modifies exception handlers."""
def visit_ExceptHandler(self, node): # noqa
return self.visit_mutation_site(node)
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = CosmicRayTestingException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
return new_node
|
...
from .operator import Operator
class CosmicRayTestingException(Exception):
pass
setattr(builtins, CosmicRayTestingException.__name__, CosmicRayTestingException)
class ExceptionReplacer(Operator):
...
def mutate(self, node, _):
"""Modify the exception handler with another exception type."""
except_id = CosmicRayTestingException.__name__
except_type = ast.Name(id=except_id, ctx=ast.Load())
new_node = ast.ExceptHandler(type=except_type, name=node.name,
body=node.body)
...
|
6c31a6c956fd52ba5aa1835fe7feae7e0f37b3a6
|
src/com/quollwriter/data/comparators/ChapterItemSorter.java
|
src/com/quollwriter/data/comparators/ChapterItemSorter.java
|
package com.quollwriter.data.comparators;
import java.util.*;
import com.quollwriter.data.*;
public class ChapterItemSorter implements Comparator<ChapterItem>
{
public int compare (ChapterItem o1,
ChapterItem o2)
{
/*
if (o1.getKey () == null)
{
return 1;
}
if (o2.getKey () == null)
{
return 1;
}
*/
if (o1.getPosition () == o2.getPosition ())
{
if ((o1.getKey () == null)
||
(o1.getName () == null)
)
{
return 1;
}
if ((o2.getKey () == null)
||
(o2.getName () == null)
)
{
return 1;
}
if ((o1 instanceof Scene)
&&
(o2 instanceof OutlineItem)
)
{
return -1;
}
if ((o2 instanceof Scene)
&&
(o1 instanceof OutlineItem)
)
{
return 1;
}
return o1.getName ().compareTo (o2.getName ());
//return (int) (o1.getKey () - o2.getKey ());
}
return o1.getPosition () - o2.getPosition ();
}
public boolean equals (Object o)
{
return this == o;
}
}
|
package com.quollwriter.data.comparators;
import java.util.*;
import com.quollwriter.data.*;
public class ChapterItemSorter implements Comparator<ChapterItem>
{
public int compare (ChapterItem o1,
ChapterItem o2)
{
/*
if (o1.getKey () == null)
{
return 1;
}
if (o2.getKey () == null)
{
return 1;
}
*/
if (o1.getPosition () == o2.getPosition ())
{
if ((o1.getKey () == null)
||
(o1.getName () == null)
)
{
return 1;
}
if ((o2.getKey () == null)
||
(o2.getName () == null)
)
{
return 1;
}
if ((o1 instanceof Scene)
&&
(o2 instanceof OutlineItem)
)
{
return -1;
}
if ((o2 instanceof Scene)
&&
(o1 instanceof OutlineItem)
)
{
return 1;
}
int nc = o1.getName ().compareTo (o2.getName ());
if (nc == 0)
{
// Return the one created first.
return o1.getDateCreated ().compareTo (o2.getDateCreated ());
}
return nc;
//return (int) (o1.getKey () - o2.getKey ());
}
return o1.getPosition () - o2.getPosition ();
}
public boolean equals (Object o)
{
return this == o;
}
}
|
Tweak to make sorting work when two objects have the same name.
|
Tweak to make sorting work when two objects have the same name.
|
Java
|
apache-2.0
|
garybentley/quollwriter,garybentley/quollwriter
|
java
|
## Code Before:
package com.quollwriter.data.comparators;
import java.util.*;
import com.quollwriter.data.*;
public class ChapterItemSorter implements Comparator<ChapterItem>
{
public int compare (ChapterItem o1,
ChapterItem o2)
{
/*
if (o1.getKey () == null)
{
return 1;
}
if (o2.getKey () == null)
{
return 1;
}
*/
if (o1.getPosition () == o2.getPosition ())
{
if ((o1.getKey () == null)
||
(o1.getName () == null)
)
{
return 1;
}
if ((o2.getKey () == null)
||
(o2.getName () == null)
)
{
return 1;
}
if ((o1 instanceof Scene)
&&
(o2 instanceof OutlineItem)
)
{
return -1;
}
if ((o2 instanceof Scene)
&&
(o1 instanceof OutlineItem)
)
{
return 1;
}
return o1.getName ().compareTo (o2.getName ());
//return (int) (o1.getKey () - o2.getKey ());
}
return o1.getPosition () - o2.getPosition ();
}
public boolean equals (Object o)
{
return this == o;
}
}
## Instruction:
Tweak to make sorting work when two objects have the same name.
## Code After:
package com.quollwriter.data.comparators;
import java.util.*;
import com.quollwriter.data.*;
public class ChapterItemSorter implements Comparator<ChapterItem>
{
public int compare (ChapterItem o1,
ChapterItem o2)
{
/*
if (o1.getKey () == null)
{
return 1;
}
if (o2.getKey () == null)
{
return 1;
}
*/
if (o1.getPosition () == o2.getPosition ())
{
if ((o1.getKey () == null)
||
(o1.getName () == null)
)
{
return 1;
}
if ((o2.getKey () == null)
||
(o2.getName () == null)
)
{
return 1;
}
if ((o1 instanceof Scene)
&&
(o2 instanceof OutlineItem)
)
{
return -1;
}
if ((o2 instanceof Scene)
&&
(o1 instanceof OutlineItem)
)
{
return 1;
}
int nc = o1.getName ().compareTo (o2.getName ());
if (nc == 0)
{
// Return the one created first.
return o1.getDateCreated ().compareTo (o2.getDateCreated ());
}
return nc;
//return (int) (o1.getKey () - o2.getKey ());
}
return o1.getPosition () - o2.getPosition ();
}
public boolean equals (Object o)
{
return this == o;
}
}
|
# ... existing code ...
return 1;
}
int nc = o1.getName ().compareTo (o2.getName ());
if (nc == 0)
{
// Return the one created first.
return o1.getDateCreated ().compareTo (o2.getDateCreated ());
}
return nc;
//return (int) (o1.getKey () - o2.getKey ());
# ... rest of the code ...
|
eebf1ad44835dba0300b8afd3ce11bd883f5d118
|
osgi.enroute.trains.api/src/osgi/enroute/trains/controller/api/RFIDSegmentController.java
|
osgi.enroute.trains.api/src/osgi/enroute/trains/controller/api/RFIDSegmentController.java
|
package osgi.enroute.trains.controller.api;
import org.osgi.util.promise.Promise;
/**
* This controller controls a LOCATOR Segment
*/
public interface RFIDSegmentController extends SegmentController {
/**
* Read an RFID. Resolves the promise when a new RFID is read.
* @return
*/
Promise<String> lastRFID();
}
|
package osgi.enroute.trains.controller.api;
import org.osgi.util.promise.Promise;
/**
* This controller controls a LOCATOR Segment
*/
public interface RFIDSegmentController extends SegmentController {
/**
* Return the last seen RFID
*/
String lastRFID();
/**
* Read an RFID. Resolves the promise when a new RFID is read.
* @return
*/
Promise<String> nextRFID();
}
|
Change the lastRFID method to return immediately and add a nextRFID method that returns a promise
|
Change the lastRFID method to return immediately and add a nextRFID method that returns a promise
|
Java
|
apache-2.0
|
tverbele/osgi.enroute.examples,osgi/osgi.enroute.examples,tverbele/osgi.enroute.examples,osgi/osgi.enroute.examples,tverbele/osgi.enroute.examples,osgi/osgi.enroute.examples
|
java
|
## Code Before:
package osgi.enroute.trains.controller.api;
import org.osgi.util.promise.Promise;
/**
* This controller controls a LOCATOR Segment
*/
public interface RFIDSegmentController extends SegmentController {
/**
* Read an RFID. Resolves the promise when a new RFID is read.
* @return
*/
Promise<String> lastRFID();
}
## Instruction:
Change the lastRFID method to return immediately and add a nextRFID method that returns a promise
## Code After:
package osgi.enroute.trains.controller.api;
import org.osgi.util.promise.Promise;
/**
* This controller controls a LOCATOR Segment
*/
public interface RFIDSegmentController extends SegmentController {
/**
* Return the last seen RFID
*/
String lastRFID();
/**
* Read an RFID. Resolves the promise when a new RFID is read.
* @return
*/
Promise<String> nextRFID();
}
|
...
public interface RFIDSegmentController extends SegmentController {
/**
* Return the last seen RFID
*/
String lastRFID();
/**
* Read an RFID. Resolves the promise when a new RFID is read.
* @return
*/
Promise<String> nextRFID();
}
...
|
4df134a89071c64e0eb2d945415e557df2a1e78d
|
server/src/main/java/scoutmgr/server/rest/JaxRsActivator.java
|
server/src/main/java/scoutmgr/server/rest/JaxRsActivator.java
|
package scoutmgr.server.rest;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
@ApplicationPath( "/api" )
public class JaxRsActivator
extends Application
{
@Override
public Set<Class<?>> getClasses()
{
final Set<Class<?>> classes = new HashSet<>();
classes.addAll( super.getClasses() );
classes.add( ScoutmgrSessionRestService.class );
classes.add( ScoutmgrReplicantPollRestService.class );
classes.add( ScoutmgrBadSessionExceptionMapper.class );
return classes;
}
}
|
package scoutmgr.server.rest;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
@ApplicationPath( "/api" )
public class JaxRsActivator
extends Application
{
@Override
public Set<Class<?>> getClasses()
{
final Set<Class<?>> classes = new HashSet<>();
classes.addAll( super.getClasses() );
classes.add( ScoutmgrSessionRestService.class );
classes.add( ScoutmgrReplicantPollRestService.class );
return classes;
}
}
|
Remove mapper. Unsure why we are keeping this class at all.
|
Remove mapper.
Unsure why we are keeping this class at all.
|
Java
|
apache-2.0
|
jcosmo/scoutmgr,jcosmo/scoutmgr,jcosmo/scoutmgr
|
java
|
## Code Before:
package scoutmgr.server.rest;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
@ApplicationPath( "/api" )
public class JaxRsActivator
extends Application
{
@Override
public Set<Class<?>> getClasses()
{
final Set<Class<?>> classes = new HashSet<>();
classes.addAll( super.getClasses() );
classes.add( ScoutmgrSessionRestService.class );
classes.add( ScoutmgrReplicantPollRestService.class );
classes.add( ScoutmgrBadSessionExceptionMapper.class );
return classes;
}
}
## Instruction:
Remove mapper.
Unsure why we are keeping this class at all.
## Code After:
package scoutmgr.server.rest;
import java.util.HashSet;
import java.util.Set;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
@ApplicationPath( "/api" )
public class JaxRsActivator
extends Application
{
@Override
public Set<Class<?>> getClasses()
{
final Set<Class<?>> classes = new HashSet<>();
classes.addAll( super.getClasses() );
classes.add( ScoutmgrSessionRestService.class );
classes.add( ScoutmgrReplicantPollRestService.class );
return classes;
}
}
|
...
classes.addAll( super.getClasses() );
classes.add( ScoutmgrSessionRestService.class );
classes.add( ScoutmgrReplicantPollRestService.class );
return classes;
}
}
...
|
af0f42b86a1e3f916041eb78a4332daf0f22531a
|
OIPA/manage.py
|
OIPA/manage.py
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
from dotenv import find_dotenv, load_dotenv
load_dotenv(find_dotenv())
if __name__ == "__main__":
current_settings = os.getenv("DJANGO_SETTINGS_MODULE", None)
if not current_settings:
raise Exception(
"Please configure your .env file along-side manage.py file and "
"set 'DJANGO_SETTINGS_MODULE=OIPA.settings_file' variable there!"
)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", current_settings)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Load current settings from .env file
|
Load current settings from .env file
OIPA-645
|
Python
|
agpl-3.0
|
openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA
|
python
|
## Code Before:
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
## Instruction:
Load current settings from .env file
OIPA-645
## Code After:
import os
import sys
from dotenv import find_dotenv, load_dotenv
load_dotenv(find_dotenv())
if __name__ == "__main__":
current_settings = os.getenv("DJANGO_SETTINGS_MODULE", None)
if not current_settings:
raise Exception(
"Please configure your .env file along-side manage.py file and "
"set 'DJANGO_SETTINGS_MODULE=OIPA.settings_file' variable there!"
)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", current_settings)
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
// ... existing code ...
import os
import sys
from dotenv import find_dotenv, load_dotenv
load_dotenv(find_dotenv())
if __name__ == "__main__":
current_settings = os.getenv("DJANGO_SETTINGS_MODULE", None)
if not current_settings:
raise Exception(
"Please configure your .env file along-side manage.py file and "
"set 'DJANGO_SETTINGS_MODULE=OIPA.settings_file' variable there!"
)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", current_settings)
from django.core.management import execute_from_command_line
// ... rest of the code ...
|
8cb37afde89b4c89079f579f3532792d7dd5ff67
|
src/http/responsedata.h
|
src/http/responsedata.h
|
namespace ApiMock {
struct ResponseData {
std::string body;
};
}
#endif
|
namespace ApiMock {
struct ResponseData {
std::string body;
std::unordered_map<std::string, std::string> headers;
HTTP_RESPONSE_CODE statusCode;
};
}
#endif
|
Add headers and status code to response
|
Add headers and status code to response
|
C
|
mit
|
Lavesson/api-mock,Lavesson/api-mock,Lavesson/api-mock,Lavesson/api-mock
|
c
|
## Code Before:
namespace ApiMock {
struct ResponseData {
std::string body;
};
}
#endif
## Instruction:
Add headers and status code to response
## Code After:
namespace ApiMock {
struct ResponseData {
std::string body;
std::unordered_map<std::string, std::string> headers;
HTTP_RESPONSE_CODE statusCode;
};
}
#endif
|
...
namespace ApiMock {
struct ResponseData {
std::string body;
std::unordered_map<std::string, std::string> headers;
HTTP_RESPONSE_CODE statusCode;
};
}
...
|
4a1cf52683b782b76fb75fa9254a37a804dda1ea
|
mywebsite/tests.py
|
mywebsite/tests.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
class ViewsTestCase(TestCase):
def test_about_view(self):
response = self.client.get(reverse('about'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "About")
|
from django.test import TestCase
from django.core.urlresolvers import reverse
class ViewsTestCase(TestCase):
def test_about_view(self):
response = self.client.get(reverse('about'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "About")
def test_contact_page(self):
response = self.client.get(reverse('contact'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Contact")
|
Add test for contact page
|
Add test for contact page
|
Python
|
mit
|
TomGijselinck/mywebsite,TomGijselinck/mywebsite
|
python
|
## Code Before:
from django.test import TestCase
from django.core.urlresolvers import reverse
class ViewsTestCase(TestCase):
def test_about_view(self):
response = self.client.get(reverse('about'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "About")
## Instruction:
Add test for contact page
## Code After:
from django.test import TestCase
from django.core.urlresolvers import reverse
class ViewsTestCase(TestCase):
def test_about_view(self):
response = self.client.get(reverse('about'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "About")
def test_contact_page(self):
response = self.client.get(reverse('contact'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Contact")
|
...
response = self.client.get(reverse('about'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "About")
def test_contact_page(self):
response = self.client.get(reverse('contact'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Contact")
...
|
aa8fb3c94dbbb7cae9b13d4441c59a7607b84583
|
cloudshell/networking/networking_resource_driver_interface.py
|
cloudshell/networking/networking_resource_driver_interface.py
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
Modify networking resource driver interface according to the latest networking standard
|
Modify networking resource driver interface according to the latest networking standard
|
Python
|
apache-2.0
|
QualiSystems/cloudshell-networking,QualiSystems/CloudShell-Networking-Core
|
python
|
## Code Before:
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
## Instruction:
Modify networking resource driver interface according to the latest networking standard
## Code After:
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
...
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
...
|
385740798eeca732519bb3ae7cfe2d96ed05c57a
|
app/src/main/java/cl/monsoon/s1next/widget/AppActivityLifecycleCallbacks.java
|
app/src/main/java/cl/monsoon/s1next/widget/AppActivityLifecycleCallbacks.java
|
package cl.monsoon.s1next.widget;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.os.Bundle;
public final class AppActivityLifecycleCallbacks implements Application.ActivityLifecycleCallbacks {
/**
* Forked from http://stackoverflow.com/a/13809991
*/
private int visibleCount;
private WifiBroadcastReceiver mWifiBroadcastReceiver;
public AppActivityLifecycleCallbacks(Context context) {
mWifiBroadcastReceiver = new WifiBroadcastReceiver(context);
}
@Override
public void onActivityCreated(Activity activity, Bundle savedInstanceState) {}
@Override
public void onActivityStarted(Activity activity) {
visibleCount++;
}
@Override
public void onActivityResumed(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mWifiBroadcastReceiver.registerIfNeeded();
}
}
@Override
public void onActivityPaused(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mWifiBroadcastReceiver.unregisterIfNeeded();
}
}
@Override
public void onActivityStopped(Activity activity) {
visibleCount--;
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle outState) {}
@Override
public void onActivityDestroyed(Activity activity) {}
public boolean isAppVisible() {
return visibleCount > 0;
}
}
|
package cl.monsoon.s1next.widget;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.os.Bundle;
public final class AppActivityLifecycleCallbacks implements Application.ActivityLifecycleCallbacks {
/**
* Forked from http://stackoverflow.com/a/13809991
*/
private int visibleCount;
private WifiBroadcastReceiver mWifiBroadcastReceiver;
private int mNeedMonitorWifiActivityCount;
public AppActivityLifecycleCallbacks(Context context) {
mWifiBroadcastReceiver = new WifiBroadcastReceiver(context);
}
@Override
public void onActivityCreated(Activity activity, Bundle savedInstanceState) {}
@Override
public void onActivityStarted(Activity activity) {
visibleCount++;
}
@Override
public void onActivityResumed(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
if (mNeedMonitorWifiActivityCount == 0) {
mWifiBroadcastReceiver.registerIfNeeded();
}
mNeedMonitorWifiActivityCount++;
}
}
@Override
public void onActivityPaused(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mNeedMonitorWifiActivityCount--;
if (mNeedMonitorWifiActivityCount == 0) {
mWifiBroadcastReceiver.unregisterIfNeeded();
}
}
}
@Override
public void onActivityStopped(Activity activity) {
visibleCount--;
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle outState) {}
@Override
public void onActivityDestroyed(Activity activity) {}
public boolean isAppVisible() {
return visibleCount > 0;
}
}
|
Fix IllegalStateException if we recreate Activities
|
Fix IllegalStateException if we recreate Activities
We do not need to unregister `WifiBroadcastReceiver` if some Activities still need to monitor Wi-Fi.
|
Java
|
apache-2.0
|
ykrank/S1-Next,gy6221/S1-Next,superpig11/S1-Next,ykrank/S1-Next,ykrank/S1-Next,floating-cat/S1-Next
|
java
|
## Code Before:
package cl.monsoon.s1next.widget;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.os.Bundle;
public final class AppActivityLifecycleCallbacks implements Application.ActivityLifecycleCallbacks {
/**
* Forked from http://stackoverflow.com/a/13809991
*/
private int visibleCount;
private WifiBroadcastReceiver mWifiBroadcastReceiver;
public AppActivityLifecycleCallbacks(Context context) {
mWifiBroadcastReceiver = new WifiBroadcastReceiver(context);
}
@Override
public void onActivityCreated(Activity activity, Bundle savedInstanceState) {}
@Override
public void onActivityStarted(Activity activity) {
visibleCount++;
}
@Override
public void onActivityResumed(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mWifiBroadcastReceiver.registerIfNeeded();
}
}
@Override
public void onActivityPaused(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mWifiBroadcastReceiver.unregisterIfNeeded();
}
}
@Override
public void onActivityStopped(Activity activity) {
visibleCount--;
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle outState) {}
@Override
public void onActivityDestroyed(Activity activity) {}
public boolean isAppVisible() {
return visibleCount > 0;
}
}
## Instruction:
Fix IllegalStateException if we recreate Activities
We do not need to unregister `WifiBroadcastReceiver` if some Activities still need to monitor Wi-Fi.
## Code After:
package cl.monsoon.s1next.widget;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.os.Bundle;
public final class AppActivityLifecycleCallbacks implements Application.ActivityLifecycleCallbacks {
/**
* Forked from http://stackoverflow.com/a/13809991
*/
private int visibleCount;
private WifiBroadcastReceiver mWifiBroadcastReceiver;
private int mNeedMonitorWifiActivityCount;
public AppActivityLifecycleCallbacks(Context context) {
mWifiBroadcastReceiver = new WifiBroadcastReceiver(context);
}
@Override
public void onActivityCreated(Activity activity, Bundle savedInstanceState) {}
@Override
public void onActivityStarted(Activity activity) {
visibleCount++;
}
@Override
public void onActivityResumed(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
if (mNeedMonitorWifiActivityCount == 0) {
mWifiBroadcastReceiver.registerIfNeeded();
}
mNeedMonitorWifiActivityCount++;
}
}
@Override
public void onActivityPaused(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mNeedMonitorWifiActivityCount--;
if (mNeedMonitorWifiActivityCount == 0) {
mWifiBroadcastReceiver.unregisterIfNeeded();
}
}
}
@Override
public void onActivityStopped(Activity activity) {
visibleCount--;
}
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle outState) {}
@Override
public void onActivityDestroyed(Activity activity) {}
public boolean isAppVisible() {
return visibleCount > 0;
}
}
|
# ... existing code ...
private int visibleCount;
private WifiBroadcastReceiver mWifiBroadcastReceiver;
private int mNeedMonitorWifiActivityCount;
public AppActivityLifecycleCallbacks(Context context) {
mWifiBroadcastReceiver = new WifiBroadcastReceiver(context);
# ... modified code ...
@Override
public void onActivityResumed(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
if (mNeedMonitorWifiActivityCount == 0) {
mWifiBroadcastReceiver.registerIfNeeded();
}
mNeedMonitorWifiActivityCount++;
}
}
...
@Override
public void onActivityPaused(Activity activity) {
if (activity instanceof WifiBroadcastReceiver.NeedMonitorWifi) {
mNeedMonitorWifiActivityCount--;
if (mNeedMonitorWifiActivityCount == 0) {
mWifiBroadcastReceiver.unregisterIfNeeded();
}
}
}
# ... rest of the code ...
|
a292d236bac2277a68c38a123658d8b7fd702c29
|
collect-earth/collect-earth-app/src/main/java/org/openforis/collect/earth/planet/FeatureSorter.java
|
collect-earth/collect-earth-app/src/main/java/org/openforis/collect/earth/planet/FeatureSorter.java
|
package org.openforis.collect.earth.planet;
import java.util.Comparator;;
public class FeatureSorter implements Comparator<Feature>{
@Override
public int compare(Feature o1, Feature o2) {
if( o1.getProperties().getCloudPercent() !=null && o2.getProperties().getCloudPercent() != null )
if( o1.getProperties().getCloudPercent() == o2.getProperties().getCloudPercent()) {
return o2.getProperties().getVisibleConfidencePercent() - o1.getProperties().getVisibleConfidencePercent();
}else {
return o1.getProperties().getCloudPercent() - o2.getProperties().getCloudPercent();
}
else if( o1.getProperties().getCloudPercent() != null ) {
return -1;
}else if( o2.getProperties().getCloudPercent() != null) {
return 1;
}else {
return (int) ( (o1.getProperties().getCloudCover() - o2.getProperties().getCloudCover() ) * 100 );
}
}
}
|
package org.openforis.collect.earth.planet;
import java.util.Comparator;;
public class FeatureSorter implements Comparator<Feature>{
@Override
public int compare(Feature o1, Feature o2) {
if( o1.getProperties().getCloudPercent() !=null && o2.getProperties().getCloudPercent() != null ) {
if( o1.getProperties().getCloudPercent() == o2.getProperties().getCloudPercent()) {
if( o2.getProperties().getVisibleConfidencePercent() !=null && o1.getProperties().getVisibleConfidencePercent() != null ) {
return o2.getProperties().getVisibleConfidencePercent() - o1.getProperties().getVisibleConfidencePercent();
}else if( o2.getProperties().getVisibleConfidencePercent() !=null ) {
return 1;
}else{
return -1;
}
}else {
return o1.getProperties().getCloudPercent() - o2.getProperties().getCloudPercent();
}
}else if( o1.getProperties().getCloudPercent() != null ) {
return -1;
}else if( o2.getProperties().getCloudPercent() != null) {
return 1;
}else {
if( o1.getProperties().getCloudCover()!=null && o2.getProperties().getCloudCover()!=null ) {
return (int) ( (o1.getProperties().getCloudCover() - o2.getProperties().getCloudCover() ) * 100 );
}else
return 1; // No way to know so return the latest
}
}
}
|
Fix null exception when images have no visible percent properties
|
Fix null exception when images have no visible percent properties
|
Java
|
mit
|
openforis/collect-earth,openforis/collect-earth,openforis/collect-earth
|
java
|
## Code Before:
package org.openforis.collect.earth.planet;
import java.util.Comparator;;
public class FeatureSorter implements Comparator<Feature>{
@Override
public int compare(Feature o1, Feature o2) {
if( o1.getProperties().getCloudPercent() !=null && o2.getProperties().getCloudPercent() != null )
if( o1.getProperties().getCloudPercent() == o2.getProperties().getCloudPercent()) {
return o2.getProperties().getVisibleConfidencePercent() - o1.getProperties().getVisibleConfidencePercent();
}else {
return o1.getProperties().getCloudPercent() - o2.getProperties().getCloudPercent();
}
else if( o1.getProperties().getCloudPercent() != null ) {
return -1;
}else if( o2.getProperties().getCloudPercent() != null) {
return 1;
}else {
return (int) ( (o1.getProperties().getCloudCover() - o2.getProperties().getCloudCover() ) * 100 );
}
}
}
## Instruction:
Fix null exception when images have no visible percent properties
## Code After:
package org.openforis.collect.earth.planet;
import java.util.Comparator;;
public class FeatureSorter implements Comparator<Feature>{
@Override
public int compare(Feature o1, Feature o2) {
if( o1.getProperties().getCloudPercent() !=null && o2.getProperties().getCloudPercent() != null ) {
if( o1.getProperties().getCloudPercent() == o2.getProperties().getCloudPercent()) {
if( o2.getProperties().getVisibleConfidencePercent() !=null && o1.getProperties().getVisibleConfidencePercent() != null ) {
return o2.getProperties().getVisibleConfidencePercent() - o1.getProperties().getVisibleConfidencePercent();
}else if( o2.getProperties().getVisibleConfidencePercent() !=null ) {
return 1;
}else{
return -1;
}
}else {
return o1.getProperties().getCloudPercent() - o2.getProperties().getCloudPercent();
}
}else if( o1.getProperties().getCloudPercent() != null ) {
return -1;
}else if( o2.getProperties().getCloudPercent() != null) {
return 1;
}else {
if( o1.getProperties().getCloudCover()!=null && o2.getProperties().getCloudCover()!=null ) {
return (int) ( (o1.getProperties().getCloudCover() - o2.getProperties().getCloudCover() ) * 100 );
}else
return 1; // No way to know so return the latest
}
}
}
|
...
@Override
public int compare(Feature o1, Feature o2) {
if( o1.getProperties().getCloudPercent() !=null && o2.getProperties().getCloudPercent() != null ) {
if( o1.getProperties().getCloudPercent() == o2.getProperties().getCloudPercent()) {
if( o2.getProperties().getVisibleConfidencePercent() !=null && o1.getProperties().getVisibleConfidencePercent() != null ) {
return o2.getProperties().getVisibleConfidencePercent() - o1.getProperties().getVisibleConfidencePercent();
}else if( o2.getProperties().getVisibleConfidencePercent() !=null ) {
return 1;
}else{
return -1;
}
}else {
return o1.getProperties().getCloudPercent() - o2.getProperties().getCloudPercent();
}
}else if( o1.getProperties().getCloudPercent() != null ) {
return -1;
}else if( o2.getProperties().getCloudPercent() != null) {
return 1;
}else {
if( o1.getProperties().getCloudCover()!=null && o2.getProperties().getCloudCover()!=null ) {
return (int) ( (o1.getProperties().getCloudCover() - o2.getProperties().getCloudCover() ) * 100 );
}else
return 1; // No way to know so return the latest
}
}
...
|
9272fd30c70e946bfcc003a2936f57efdaa05bd7
|
bindings/jupyroot/python/JupyROOT/__init__.py
|
bindings/jupyroot/python/JupyROOT/__init__.py
|
from JupyROOT.helpers import cppcompleter, utils
if '__IPYTHON__' in __builtins__ and __IPYTHON__:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
|
from JupyROOT.helpers import cppcompleter, utils
# Check if we are in the IPython shell
try:
import builtins
except ImportError:
import __builtin__ as builtins # Py2
_is_ipython = hasattr(builtins, '__IPYTHON__')
if _is_ipython:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
|
Update logic to check for IPython
|
[JupyROOT] Update logic to check for IPython
To sync it with what was already introduced in ROOT/__init__.py
|
Python
|
lgpl-2.1
|
olifre/root,olifre/root,root-mirror/root,olifre/root,olifre/root,olifre/root,root-mirror/root,root-mirror/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,olifre/root,root-mirror/root,root-mirror/root,root-mirror/root,root-mirror/root,olifre/root,olifre/root,root-mirror/root,olifre/root
|
python
|
## Code Before:
from JupyROOT.helpers import cppcompleter, utils
if '__IPYTHON__' in __builtins__ and __IPYTHON__:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
## Instruction:
[JupyROOT] Update logic to check for IPython
To sync it with what was already introduced in ROOT/__init__.py
## Code After:
from JupyROOT.helpers import cppcompleter, utils
# Check if we are in the IPython shell
try:
import builtins
except ImportError:
import __builtin__ as builtins # Py2
_is_ipython = hasattr(builtins, '__IPYTHON__')
if _is_ipython:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
|
...
from JupyROOT.helpers import cppcompleter, utils
# Check if we are in the IPython shell
try:
import builtins
except ImportError:
import __builtin__ as builtins # Py2
_is_ipython = hasattr(builtins, '__IPYTHON__')
if _is_ipython:
cppcompleter.load_ipython_extension(get_ipython())
utils.iPythonize()
...
|
06fd79674eeb82cd1e6cabca1e513f97ccf48cbf
|
avocado/forms.py
|
avocado/forms.py
|
from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
Fix DataField admin validation for identifiers
|
Fix DataField admin validation for identifiers
|
Python
|
bsd-2-clause
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
python
|
## Code Before:
from django import forms
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data['app_name']
if models.get_app(app_name) is None:
raise forms.ValidationError('The app "{0}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = super(DataFieldAdminForm, self).clean()
instance = super(DataFieldAdminForm, self).save(commit=False)
model_name = cleaned_data['model_name']
if instance.model is None:
del cleaned_data['model_name']
msg = 'The model "{0}" could not be found'.format(model_name)
self._errors['model_name'] = self.error_class([msg])
# test `field_name'
field_name = cleaned_data['field_name']
if instance.field is None:
del cleaned_data['field_name']
msg = 'The model "{0}" does not have a field named "{1}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
## Instruction:
Fix DataField admin validation for identifiers
## Code After:
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
model = DataField
|
// ... existing code ...
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from avocado.models import DataField
// ... modified code ...
class DataFieldAdminForm(forms.ModelForm):
def clean_app_name(self):
app_name = self.cleaned_data.get('app_name')
try:
models.get_app(app_name)
except ImproperlyConfigured:
raise forms.ValidationError('The app "{}" could not be found'.format(app_name))
return app_name
def clean(self):
cleaned_data = self.cleaned_data
app_name = self.cleaned_data.get('app_name')
model_name = cleaned_data.get('model_name')
field_name = cleaned_data.get('field_name')
model = models.get_model(app_name, model_name)
if model is None:
del cleaned_data['model_name']
msg = 'The model "{}" could not be found in the app "{}"'.format(model_name, app_name)
self._errors['model_name'] = self.error_class([msg])
elif not model._meta.get_field_by_name(field_name):
del cleaned_data['field_name']
msg = 'The model "{}" does not have a field named "{}"'.format(model_name, field_name)
self._errors['field_name'] = self.error_class([msg])
return cleaned_data
class Meta(object):
// ... rest of the code ...
|
186a72b91798b11d13ea7f2538141f620b0787a8
|
tests/test_metrics.py
|
tests/test_metrics.py
|
import json
from . import TestCase
class MetricsTests(TestCase):
def test_find(self):
url = '/metrics/find'
response = self.app.get(url)
self.assertEqual(response.status_code, 400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, [])
def test_expand(self):
url = '/metrics/expand'
response = self.app.get(url)
self.assertJSON(response, {'errors':
{'query': 'this parameter is required.'}},
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.data.decode('utf-8')),
{'results': []})
|
from . import TestCase
class MetricsTests(TestCase):
def test_find(self):
url = '/metrics/find'
response = self.app.get(url)
self.assertEqual(response.status_code, 400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, [])
def test_expand(self):
url = '/metrics/expand'
response = self.app.get(url)
self.assertJSON(response, {'errors':
{'query': 'this parameter is required.'}},
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, {'results': []})
def test_noop(self):
url = '/dashboard/find'
response = self.app.get(url)
self.assertJSON(response, {'dashboards': []})
url = '/dashboard/load/foo'
response = self.app.get(url)
self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."},
status_code=404)
url = '/events/get_data'
response = self.app.get(url)
self.assertJSON(response, [])
|
Add test for noop routes
|
Add test for noop routes
|
Python
|
apache-2.0
|
vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,GeorgeJahad/graphite-api,vladimir-smirnov-sociomantic/graphite-api,michaelrice/graphite-api,alphapigger/graphite-api,raintank/graphite-api,hubrick/graphite-api,rackerlabs/graphite-api,Knewton/graphite-api,raintank/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,DaveBlooman/graphite-api,rackerlabs/graphite-api,brutasse/graphite-api,DaveBlooman/graphite-api,hubrick/graphite-api,raintank/graphite-api,tpeng/graphite-api,winguru/graphite-api,winguru/graphite-api,bogus-py/graphite-api,tpeng/graphite-api,cybem/graphite-api-iow,absalon-james/graphite-api,alphapigger/graphite-api,absalon-james/graphite-api,brutasse/graphite-api,GeorgeJahad/graphite-api
|
python
|
## Code Before:
import json
from . import TestCase
class MetricsTests(TestCase):
def test_find(self):
url = '/metrics/find'
response = self.app.get(url)
self.assertEqual(response.status_code, 400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, [])
def test_expand(self):
url = '/metrics/expand'
response = self.app.get(url)
self.assertJSON(response, {'errors':
{'query': 'this parameter is required.'}},
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.data.decode('utf-8')),
{'results': []})
## Instruction:
Add test for noop routes
## Code After:
from . import TestCase
class MetricsTests(TestCase):
def test_find(self):
url = '/metrics/find'
response = self.app.get(url)
self.assertEqual(response.status_code, 400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, [])
def test_expand(self):
url = '/metrics/expand'
response = self.app.get(url)
self.assertJSON(response, {'errors':
{'query': 'this parameter is required.'}},
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, {'results': []})
def test_noop(self):
url = '/dashboard/find'
response = self.app.get(url)
self.assertJSON(response, {'dashboards': []})
url = '/dashboard/load/foo'
response = self.app.get(url)
self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."},
status_code=404)
url = '/events/get_data'
response = self.app.get(url)
self.assertJSON(response, [])
|
# ... existing code ...
from . import TestCase
# ... modified code ...
status_code=400)
response = self.app.get(url, query_string={'query': 'test'})
self.assertJSON(response, {'results': []})
def test_noop(self):
url = '/dashboard/find'
response = self.app.get(url)
self.assertJSON(response, {'dashboards': []})
url = '/dashboard/load/foo'
response = self.app.get(url)
self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."},
status_code=404)
url = '/events/get_data'
response = self.app.get(url)
self.assertJSON(response, [])
# ... rest of the code ...
|
8c5f317a090a23f10adcc837645bd25a8b5626f8
|
shap/models/_model.py
|
shap/models/_model.py
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
Check SHAP Model call type
|
Check SHAP Model call type
|
Python
|
mit
|
slundberg/shap,slundberg/shap,slundberg/shap,slundberg/shap
|
python
|
## Code Before:
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
return np.array(self.inner_model(*args))
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
## Instruction:
Check SHAP Model call type
## Code After:
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
""" This is the superclass of all models.
"""
def __init__(self, model=None):
""" Wrap a callable model as a SHAP Model object.
"""
if isinstance(model, Model):
self.inner_model = model.inner_model
else:
self.inner_model = model
if hasattr(model, "output_names"):
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
"""
super().save(out_file)
with Serializer(out_file, "shap.Model", version=0) as s:
s.save("model", self.inner_model)
@classmethod
def load(cls, in_file, instantiate=True):
if instantiate:
return cls._instantiated_load(in_file)
kwargs = super().load(in_file, instantiate=False)
with Deserializer(in_file, "shap.Model", min_version=0, max_version=0) as s:
kwargs["model"] = s.load("model")
return kwargs
|
...
import numpy as np
from .._serializable import Serializable, Serializer, Deserializer
from torch import Tensor
class Model(Serializable):
...
self.output_names = model.output_names
def __call__(self, *args):
out = self.inner_model(*args)
out = out.cpu().detach().numpy() if isinstance(out, Tensor) else np.array(out)
return out
def save(self, out_file):
""" Save the model to the given file stream.
...
|
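The change in the record above reduces to one conversion step: move a torch tensor to CPU, detach it, and convert it to NumPy, otherwise wrap the output with np.array. A standalone sketch of that conversion (the helper name to_numpy is illustrative and not part of SHAP's API; the try/except simply makes torch optional):
import numpy as np
try:
    from torch import Tensor
except ImportError:  # torch absent: use an empty tuple so isinstance never matches
    Tensor = ()
def to_numpy(out):
    # Mirror Model.__call__ above: detach and copy tensors, pass the rest to np.array.
    if isinstance(out, Tensor):
        return out.cpu().detach().numpy()
    return np.array(out)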
11d4059cf5c66e6de648c675bb049825901479cf
|
code/array_map.py
|
code/array_map.py
|
arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
|
arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
|
Use more consistent example for map
|
Use more consistent example for map
There is a `map` function in Python, but for simple single-expression
calculations, list comprehensions are much better suited.
While map works well if there is an existing function you can pass.
|
Python
|
mit
|
Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare,evmorov/lang-compare,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare
|
python
|
## Code Before:
arr = [1, 5, 10, 20]
print(*map(lambda num: num * 2, arr))
## Instruction:
Use more consistent example for map
There is a `map` function in pythin, but for simple single expression
calculations, list comprehensions are much better suited.
While map works well if there is a function, you can pass.
## Code After:
arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
|
// ... existing code ...
arr = [1, 5, 10, 20]
print([num * 2 for num in arr])
// ... rest of the code ...
|
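The reasoning in the record above is easiest to see side by side; a short illustrative snippet (not part of the repository) contrasting the two forms:
def double(num):
    return num * 2
arr = [1, 5, 10, 20]
print(list(map(double, arr)))    # map reads naturally with an existing function
print([num * 2 for num in arr])  # a comprehension suits a simple inline expression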
ed4f786de54dde50cb26cfe4859507579806a14b
|
portal_sale_distributor/models/ir_action_act_window.py
|
portal_sale_distributor/models/ir_action_act_window.py
|
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
if result and result[0].get('context'):
ctx = safe_eval(result[0].get('context', '{}'))
if ctx.get('portal_products'):
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id})
result[0].update({'context': ctx})
return result
|
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
for value in result:
if value.get('context') and 'portal_products' in value.get('context'):
eval_ctx = dict(self.env.context)
try:
ctx = safe_eval(value.get('context', '{}'), eval_ctx)
except:
ctx = {}
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id})
value.update({'context': str(ctx)})
return result
|
Adjust to avoid bugs with other values in context
|
[FIX] portal_sale_distributor: Adjust to avoid bugs with other values in context
closes ingadhoc/sale#493
X-original-commit: 441d30af0c3fa8cbbe129893107436ea69cca740
Signed-off-by: Juan José Scarafía <[email protected]>
|
Python
|
agpl-3.0
|
ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale
|
python
|
## Code Before:
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
if result and result[0].get('context'):
ctx = safe_eval(result[0].get('context', '{}'))
if ctx.get('portal_products'):
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id})
result[0].update({'context': ctx})
return result
## Instruction:
[FIX] portal_sale_distributor: Adjust to avoid bugs with other values in context
closes ingadhoc/sale#493
X-original-commit: 441d30af0c3fa8cbbe129893107436ea69cca740
Signed-off-by: Juan José Scarafía <[email protected]>
## Code After:
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
for value in result:
if value.get('context') and 'portal_products' in value.get('context'):
eval_ctx = dict(self.env.context)
try:
ctx = safe_eval(value.get('context', '{}'), eval_ctx)
except:
ctx = {}
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id})
value.update({'context': str(ctx)})
return result
|
// ... existing code ...
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
for value in result:
if value.get('context') and 'portal_products' in value.get('context'):
eval_ctx = dict(self.env.context)
try:
ctx = safe_eval(value.get('context', '{}'), eval_ctx)
except:
ctx = {}
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id})
value.update({'context': str(ctx)})
return result
// ... rest of the code ...
|
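The fix in the record above boils down to evaluating each action's stored context string defensively. A compact sketch of that pattern, assuming the same odoo.tools.safe_eval helper used in the record (the function name eval_action_context is ours):
from odoo.tools.safe_eval import safe_eval
def eval_action_context(raw_ctx, env_ctx):
    # Evaluate the stored context string against a copy of the environment
    # context; fall back to an empty dict when it cannot be evaluated.
    try:
        return safe_eval(raw_ctx or '{}', dict(env_ctx))
    except Exception:
        return {}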
d2db26f1c08242b61709d6c297ad28e02acd7abd
|
Wangscape/noise/module/codecs/CornerCombinerBaseWrapperCodec.h
|
Wangscape/noise/module/codecs/CornerCombinerBaseWrapperCodec.h
|
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::module::Wrapper<noise::module::CornerCombinerBase>>
{
using CornerCombinerBaseWrapper = noise::module::Wrapper<noise::module::CornerCombinerBase>;
static codec::object_t<CornerCombinerBaseWrapper> codec()
{
auto codec = codec::object<CornerCombinerBaseWrapper>();
codec.required("type", codec::eq<std::string>("CornerCombinerBase"));
codec.optional("Power",
[](CornerCombinerBase CornerCombinerBaseWrapper& mw) {return mw.module.GetPower(); },
[](CornerCombinerBaseWrapper& mw, double power) {mw.module.SetPower(power); });
return codec;
}
};
}
}
|
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::module::Wrapper<noise::module::CornerCombinerBase>>
{
using CornerCombinerBaseWrapper = noise::module::Wrapper<noise::module::CornerCombinerBase>;
static codec::object_t<CornerCombinerBaseWrapper> codec()
{
auto codec = codec::object<CornerCombinerBaseWrapper>();
codec.required("type", codec::eq<std::string>("CornerCombinerBase"));
codec.optional("Power",
[](const CornerCombinerBaseWrapper& mw) {return mw.module.GetPower(); },
[](CornerCombinerBaseWrapper& mw, double power) {mw.module.SetPower(power); });
return codec;
}
};
}
}
|
Fix find&replace error in CornerCombinerBase codec
|
Fix find&replace error in CornerCombinerBase codec
|
C
|
mit
|
Wangscape/Wangscape,serin-delaunay/Wangscape,Wangscape/Wangscape,serin-delaunay/Wangscape,Wangscape/Wangscape
|
c
|
## Code Before:
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::module::Wrapper<noise::module::CornerCombinerBase>>
{
using CornerCombinerBaseWrapper = noise::module::Wrapper<noise::module::CornerCombinerBase>;
static codec::object_t<CornerCombinerBaseWrapper> codec()
{
auto codec = codec::object<CornerCombinerBaseWrapper>();
codec.required("type", codec::eq<std::string>("CornerCombinerBase"));
codec.optional("Power",
[](CornerCombinerBase CornerCombinerBaseWrapper& mw) {return mw.module.GetPower(); },
[](CornerCombinerBaseWrapper& mw, double power) {mw.module.SetPower(power); });
return codec;
}
};
}
}
## Instruction:
Fix find&replace error in CornerCombinerBase codec
## Code After:
namespace spotify
{
namespace json
{
template<>
struct default_codec_t<noise::module::Wrapper<noise::module::CornerCombinerBase>>
{
using CornerCombinerBaseWrapper = noise::module::Wrapper<noise::module::CornerCombinerBase>;
static codec::object_t<CornerCombinerBaseWrapper> codec()
{
auto codec = codec::object<CornerCombinerBaseWrapper>();
codec.required("type", codec::eq<std::string>("CornerCombinerBase"));
codec.optional("Power",
[](const CornerCombinerBaseWrapper& mw) {return mw.module.GetPower(); },
[](CornerCombinerBaseWrapper& mw, double power) {mw.module.SetPower(power); });
return codec;
}
};
}
}
|
// ... existing code ...
auto codec = codec::object<CornerCombinerBaseWrapper>();
codec.required("type", codec::eq<std::string>("CornerCombinerBase"));
codec.optional("Power",
[](const CornerCombinerBaseWrapper& mw) {return mw.module.GetPower(); },
[](CornerCombinerBaseWrapper& mw, double power) {mw.module.SetPower(power); });
return codec;
}
// ... rest of the code ...
|
2e729b437434e6d355602f9fd74bc9bd3b42f120
|
core/tests.py
|
core/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
from core.models import Profile, User
class ProfileTestCase(TestCase):
"""This class defines the test suite for the Person model."""
def setUp(self):
"""Define the test variables."""
self.username = "some-test-user"
self.email = "[email protected]"
self.password = "passgoeshere123"
self.user = User(
username=self.username,
email=self.email,
password=self.password
)
def test_model_can_create_a_profile(self):
"""Test the Person model can create a profile."""
old_count = Profile.objects.count()
self.user.save()
self.profile = self.profile = Profile(user=self.user)
self.profile.save()
new_count = Profile.objects.count()
self.assertNotEqual(old_count, new_count)
|
Add test to model Profile
|
Add test to model Profile
|
Python
|
mit
|
desenho-sw-g5/service_control,desenho-sw-g5/service_control
|
python
|
## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Add test to model Profile
## Code After:
from django.test import TestCase
from core.models import Profile, User
class ProfileTestCase(TestCase):
"""This class defines the test suite for the Person model."""
def setUp(self):
"""Define the test variables."""
self.username = "some-test-user"
self.email = "[email protected]"
self.password = "passgoeshere123"
self.user = User(
username=self.username,
email=self.email,
password=self.password
)
def test_model_can_create_a_profile(self):
"""Test the Person model can create a profile."""
old_count = Profile.objects.count()
self.user.save()
self.profile = self.profile = Profile(user=self.user)
self.profile.save()
new_count = Profile.objects.count()
self.assertNotEqual(old_count, new_count)
|
# ... existing code ...
from django.test import TestCase
from core.models import Profile, User
class ProfileTestCase(TestCase):
"""This class defines the test suite for the Person model."""
def setUp(self):
"""Define the test variables."""
self.username = "some-test-user"
self.email = "[email protected]"
self.password = "passgoeshere123"
self.user = User(
username=self.username,
email=self.email,
password=self.password
)
def test_model_can_create_a_profile(self):
"""Test the Person model can create a profile."""
old_count = Profile.objects.count()
self.user.save()
self.profile = self.profile = Profile(user=self.user)
self.profile.save()
new_count = Profile.objects.count()
self.assertNotEqual(old_count, new_count)
# ... rest of the code ...
|
c9580f8d700308df2d3bf5710261314d402fc826
|
democracy_club/settings/testing.py
|
democracy_club/settings/testing.py
|
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
|
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
Add placeholder values for trello items in tests
|
Add placeholder values for trello items in tests
|
Python
|
bsd-3-clause
|
DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website,DemocracyClub/Website
|
python
|
## Code Before:
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
## Instruction:
Add placeholder values for trello items in tests
## Code After:
from .base import * # noqa
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'dc_website_test',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
|
# ... existing code ...
}
BACKLOG_TRELLO_BOARD_ID = "empty"
BACKLOG_TRELLO_DEFAULT_LIST_ID = "empty"
BACKLOG_TRELLO_KEY = "empty"
BACKLOG_TRELLO_TOKEN = "empty"
# ... rest of the code ...
|
a29d712b69f64ec248fb7f6829da9996dc5b217a
|
tests/integration/test_with_activemq.py
|
tests/integration/test_with_activemq.py
|
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
|
import os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
|
Test ActiveMQ under SSL connection
|
Test ActiveMQ under SSL connection
|
Python
|
bsd-3-clause
|
rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective
|
python
|
## Code Before:
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
## Instruction:
Test ActiveMQ under SSL connection
## Code After:
import os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
|
# ... existing code ...
import os
from pymco.test import ctxt
from . import base
# ... modified code ...
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
# ... rest of the code ...
|
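For orientation, the three plugin.activemq.pool.1.ssl.* settings in the record above name a CA bundle plus a client certificate/key pair. A standard-library sketch of the SSL context they would typically describe (illustrative only, not python-mcollective's connection code; ROOT stands in for pymco.test.ctxt.ROOT):
import os
import ssl
ROOT = '/path/to/tests'  # placeholder for pymco.test.ctxt.ROOT
context = ssl.create_default_context(cafile=os.path.join(ROOT, 'fixtures/ca.pem'))
context.load_cert_chain(
    certfile=os.path.join(ROOT, 'fixtures/activemq_cert.pem'),
    keyfile=os.path.join(ROOT, 'fixtures/activemq_private.pem'),
)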
8348cf481dc098cb5cf583dd86a6923c9c03d5f5
|
freight/utils/auth.py
|
freight/utils/auth.py
|
from __future__ import absolute_import
from flask import current_app, request, session
from freight.models import User
from freight.testutils.fixtures import Fixtures
NOT_SET = object()
def get_current_user():
"""
Return the currently authenticated user based on their active session.
Will return a dummy user if in development mode.
"""
if getattr(request, 'current_user', NOT_SET) is NOT_SET:
if current_app.config.get('DEV'):
request.current_user = User.query.filter(
User.name == 'Freight',
).first()
if not request.current_user:
request.current_user = Fixtures().create_user(
name='Freight',
)
elif session.get('uid') is None:
request.current_user = None
else:
request.current_user = User.query.get(session['uid'])
if request.current_user is None:
del session['uid']
return request.current_user
|
from __future__ import absolute_import
from flask import current_app, request, session
from freight.models import User
NOT_SET = object()
def get_current_user():
"""
Return the currently authenticated user based on their active session.
Will return a dummy user if in development mode.
"""
if getattr(request, 'current_user', NOT_SET) is NOT_SET:
if current_app.config.get('DEV'):
from freight.testutils.fixtures import Fixtures
request.current_user = User.query.filter(
User.name == 'Freight',
).first()
if not request.current_user:
request.current_user = Fixtures().create_user(
name='Freight',
)
elif session.get('uid') is None:
request.current_user = None
else:
request.current_user = User.query.get(session['uid'])
if request.current_user is None:
del session['uid']
return request.current_user
|
Move fixture import to only be in DEV
|
Move fixture import to only be in DEV
|
Python
|
apache-2.0
|
getsentry/freight,getsentry/freight,getsentry/freight,getsentry/freight,getsentry/freight
|
python
|
## Code Before:
from __future__ import absolute_import
from flask import current_app, request, session
from freight.models import User
from freight.testutils.fixtures import Fixtures
NOT_SET = object()
def get_current_user():
"""
Return the currently authenticated user based on their active session.
Will return a dummy user if in development mode.
"""
if getattr(request, 'current_user', NOT_SET) is NOT_SET:
if current_app.config.get('DEV'):
request.current_user = User.query.filter(
User.name == 'Freight',
).first()
if not request.current_user:
request.current_user = Fixtures().create_user(
name='Freight',
)
elif session.get('uid') is None:
request.current_user = None
else:
request.current_user = User.query.get(session['uid'])
if request.current_user is None:
del session['uid']
return request.current_user
## Instruction:
Move fixture import to only be in DEV
## Code After:
from __future__ import absolute_import
from flask import current_app, request, session
from freight.models import User
NOT_SET = object()
def get_current_user():
"""
Return the currently authenticated user based on their active session.
Will return a dummy user if in development mode.
"""
if getattr(request, 'current_user', NOT_SET) is NOT_SET:
if current_app.config.get('DEV'):
from freight.testutils.fixtures import Fixtures
request.current_user = User.query.filter(
User.name == 'Freight',
).first()
if not request.current_user:
request.current_user = Fixtures().create_user(
name='Freight',
)
elif session.get('uid') is None:
request.current_user = None
else:
request.current_user = User.query.get(session['uid'])
if request.current_user is None:
del session['uid']
return request.current_user
|
// ... existing code ...
from flask import current_app, request, session
from freight.models import User
NOT_SET = object()
// ... modified code ...
"""
if getattr(request, 'current_user', NOT_SET) is NOT_SET:
if current_app.config.get('DEV'):
from freight.testutils.fixtures import Fixtures
request.current_user = User.query.filter(
User.name == 'Freight',
).first()
// ... rest of the code ...
|
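The record above is the usual deferred-import pattern: a test-only module is imported inside the development-only branch so production imports never load it. A minimal sketch of the same idea (the helper name load_dev_user is illustrative; Fixtures().create_user is taken from the record):
def load_dev_user(name='Freight'):
    # Importing here keeps freight.testutils out of the import graph unless
    # the development path actually executes.
    from freight.testutils.fixtures import Fixtures
    return Fixtures().create_user(name=name)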
6142f5996021eb7199f520bdeb5243321edd65f9
|
integration-tests/gradle/projects/it-multiplatform-0/build.gradle.kts
|
integration-tests/gradle/projects/it-multiplatform-0/build.gradle.kts
|
import org.jetbrains.dokka.gradle.DokkaTask
plugins {
kotlin("multiplatform")
id("org.jetbrains.dokka")
}
apply(from = "../template.root.gradle.kts")
kotlin {
jvm()
linuxX64("linux")
macosX64("macos")
sourceSets {
named("commonMain") {
dependencies {
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.8-1.4.0-rc")
}
}
}
}
tasks.withType<DokkaTask> {
dokkaSourceSets {
create("commonMain")
create("jvmMain")
create("linuxMain")
create("macosMain")
}
}
|
import org.jetbrains.dokka.gradle.DokkaTask
plugins {
kotlin("multiplatform")
id("org.jetbrains.dokka")
}
apply(from = "../template.root.gradle.kts")
kotlin {
jvm()
linuxX64("linux")
macosX64("macos")
js()
sourceSets {
named("commonMain") {
dependencies {
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.8-1.4.0-rc")
}
}
}
}
tasks.withType<DokkaTask> {
dokkaSourceSets {
create("commonMain")
create("jvmMain")
create("linuxMain")
create("macosMain")
create("jsMain")
}
}
|
Add js target back to Multiplatform0GradleIntegrationTest
|
Add js target back to Multiplatform0GradleIntegrationTest
|
Kotlin
|
apache-2.0
|
Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka
|
kotlin
|
## Code Before:
import org.jetbrains.dokka.gradle.DokkaTask
plugins {
kotlin("multiplatform")
id("org.jetbrains.dokka")
}
apply(from = "../template.root.gradle.kts")
kotlin {
jvm()
linuxX64("linux")
macosX64("macos")
sourceSets {
named("commonMain") {
dependencies {
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.8-1.4.0-rc")
}
}
}
}
tasks.withType<DokkaTask> {
dokkaSourceSets {
create("commonMain")
create("jvmMain")
create("linuxMain")
create("macosMain")
}
}
## Instruction:
Add js target back to Multiplatform0GradleIntegrationTest
## Code After:
import org.jetbrains.dokka.gradle.DokkaTask
plugins {
kotlin("multiplatform")
id("org.jetbrains.dokka")
}
apply(from = "../template.root.gradle.kts")
kotlin {
jvm()
linuxX64("linux")
macosX64("macos")
js()
sourceSets {
named("commonMain") {
dependencies {
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.3.8-1.4.0-rc")
}
}
}
}
tasks.withType<DokkaTask> {
dokkaSourceSets {
create("commonMain")
create("jvmMain")
create("linuxMain")
create("macosMain")
create("jsMain")
}
}
|
...
jvm()
linuxX64("linux")
macosX64("macos")
js()
sourceSets {
named("commonMain") {
dependencies {
...
create("jvmMain")
create("linuxMain")
create("macosMain")
create("jsMain")
}
}
...
|
b861ab0d1cde2cd8f614c52c231008ddbb82a199
|
src/main/java/com/imcode/imcms/mapping/jpa/doc/content/VersionedContent.java
|
src/main/java/com/imcode/imcms/mapping/jpa/doc/content/VersionedContent.java
|
package com.imcode.imcms.mapping.jpa.doc.content;
import com.imcode.imcms.mapping.jpa.doc.Version;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
@MappedSuperclass
public abstract class VersionedContent {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
@Column(name = "doc_id", unique = true, nullable = false, insertable = false, updatable = false)
private Integer documentId;
@NotNull
@ManyToOne
@JoinColumns({
@JoinColumn(name = "doc_id", referencedColumnName = "doc_id"),
@JoinColumn(name = "doc_version_no", referencedColumnName = "no")
})
private Version version;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Version getVersion() {
return version;
}
public void setVersion(Version contentVersion) {
this.version = contentVersion;
this.documentId = contentVersion.getDocId();
this.id = contentVersion.getId();
}
public Integer getDocumentId() {
return documentId;
}
}
|
package com.imcode.imcms.mapping.jpa.doc.content;
import com.imcode.imcms.mapping.jpa.doc.Version;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
@MappedSuperclass
public abstract class VersionedContent {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
@Column(name = "doc_id", unique = true, nullable = false, insertable = false, updatable = false)
private Integer documentId;
@NotNull
@ManyToOne
@JoinColumns({
@JoinColumn(name = "doc_id", referencedColumnName = "doc_id"),
@JoinColumn(name = "doc_version_no", referencedColumnName = "no")
})
private Version version;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Version getVersion() {
return version;
}
public void setVersion(Version contentVersion) {
this.version = contentVersion;
this.documentId = contentVersion.getDocId();
}
public Integer getDocumentId() {
return documentId;
}
}
|
Apply new UI to the admin panel and editors: - Versioned content no longer stealing its version's id.
|
IMCMS-233: Apply new UI to the admin panel and editors:
- Versioned content no longer stealing its version's id.
|
Java
|
agpl-3.0
|
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
|
java
|
## Code Before:
package com.imcode.imcms.mapping.jpa.doc.content;
import com.imcode.imcms.mapping.jpa.doc.Version;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
@MappedSuperclass
public abstract class VersionedContent {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
@Column(name = "doc_id", unique = true, nullable = false, insertable = false, updatable = false)
private Integer documentId;
@NotNull
@ManyToOne
@JoinColumns({
@JoinColumn(name = "doc_id", referencedColumnName = "doc_id"),
@JoinColumn(name = "doc_version_no", referencedColumnName = "no")
})
private Version version;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Version getVersion() {
return version;
}
public void setVersion(Version contentVersion) {
this.version = contentVersion;
this.documentId = contentVersion.getDocId();
this.id = contentVersion.getId();
}
public Integer getDocumentId() {
return documentId;
}
}
## Instruction:
IMCMS-233: Apply new UI to the admin panel and editors:
- Versioned content no longer stealing its version's id.
## Code After:
package com.imcode.imcms.mapping.jpa.doc.content;
import com.imcode.imcms.mapping.jpa.doc.Version;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
@MappedSuperclass
public abstract class VersionedContent {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
@Column(name = "doc_id", unique = true, nullable = false, insertable = false, updatable = false)
private Integer documentId;
@NotNull
@ManyToOne
@JoinColumns({
@JoinColumn(name = "doc_id", referencedColumnName = "doc_id"),
@JoinColumn(name = "doc_version_no", referencedColumnName = "no")
})
private Version version;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Version getVersion() {
return version;
}
public void setVersion(Version contentVersion) {
this.version = contentVersion;
this.documentId = contentVersion.getDocId();
}
public Integer getDocumentId() {
return documentId;
}
}
|
# ... existing code ...
public void setVersion(Version contentVersion) {
this.version = contentVersion;
this.documentId = contentVersion.getDocId();
}
public Integer getDocumentId() {
# ... rest of the code ...
|
b6927cadb72e0a73700416d0218a569c15ec8818
|
generative/tests/compare_test/concat_first/run.py
|
generative/tests/compare_test/concat_first/run.py
|
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import subprocess
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('layer', type=str, help='fc6|conv42|pool1')
parser.add_argument('--cuda-device', type=int, default=0, help='0|1|2|3 [default: 0]')
args = parser.parse_args()
for i in xrange(5):
out_dir = './trained_models/%s/%d' % (args.layer, i + 1)
train_test_split_dir = './train_test_split/%d' % (i + 1)
command = 'CUDA_VISIBLE_DEVICES={device} python train_average.py {layer} --train-test-split-dir {split_dir} --out-dir {out_dir} --cuda'.format(
device=args.cuda_device, layer=args.layer, split_dir=train_test_split_dir, out_dir=out_dir)
subprocess.call(command, shell=True)
|
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import subprocess
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('layer', type=str, help='fc6|conv42|pool1')
parser.add_argument('--cuda-device', type=int, default=0, help='0|1|2|3 [default: 0]')
args = parser.parse_args()
for i in xrange(5):
out_dir = '/mnt/visual_communication_dataset/trained_models_5_30_18/%s/%d' % (args.layer, i + 1)
train_test_split_dir = './train_test_split/%d' % (i + 1)
command = 'CUDA_VISIBLE_DEVICES={device} python train_average.py {layer} --train-test-split-dir {split_dir} --out-dir {out_dir} --cuda'.format(
device=args.cuda_device, layer=args.layer, split_dir=train_test_split_dir, out_dir=out_dir)
subprocess.call(command, shell=True)
|
Update path to save to mnt dir
|
Update path to save to mnt dir
|
Python
|
mit
|
judithfan/pix2svg
|
python
|
## Code Before:
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import subprocess
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('layer', type=str, help='fc6|conv42|pool1')
parser.add_argument('--cuda-device', type=int, default=0, help='0|1|2|3 [default: 0]')
args = parser.parse_args()
for i in xrange(5):
out_dir = './trained_models/%s/%d' % (args.layer, i + 1)
train_test_split_dir = './train_test_split/%d' % (i + 1)
command = 'CUDA_VISIBLE_DEVICES={device} python train_average.py {layer} --train-test-split-dir {split_dir} --out-dir {out_dir} --cuda'.format(
device=args.cuda_device, layer=args.layer, split_dir=train_test_split_dir, out_dir=out_dir)
subprocess.call(command, shell=True)
## Instruction:
Update path to save to mnt dir
## Code After:
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import subprocess
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('layer', type=str, help='fc6|conv42|pool1')
parser.add_argument('--cuda-device', type=int, default=0, help='0|1|2|3 [default: 0]')
args = parser.parse_args()
for i in xrange(5):
out_dir = '/mnt/visual_communication_dataset/trained_models_5_30_18/%s/%d' % (args.layer, i + 1)
train_test_split_dir = './train_test_split/%d' % (i + 1)
command = 'CUDA_VISIBLE_DEVICES={device} python train_average.py {layer} --train-test-split-dir {split_dir} --out-dir {out_dir} --cuda'.format(
device=args.cuda_device, layer=args.layer, split_dir=train_test_split_dir, out_dir=out_dir)
subprocess.call(command, shell=True)
|
// ... existing code ...
args = parser.parse_args()
for i in xrange(5):
out_dir = '/mnt/visual_communication_dataset/trained_models_5_30_18/%s/%d' % (args.layer, i + 1)
train_test_split_dir = './train_test_split/%d' % (i + 1)
command = 'CUDA_VISIBLE_DEVICES={device} python train_average.py {layer} --train-test-split-dir {split_dir} --out-dir {out_dir} --cuda'.format(
device=args.cuda_device, layer=args.layer, split_dir=train_test_split_dir, out_dir=out_dir)
// ... rest of the code ...
|
f403f86cad3eb1b6b41a39f02ea8e1b0b06cff2a
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='regrowl',
description='Regrowl server',
author='Paul Traylor',
url='https://github.com/kfdm/gntp-regrowl',
version='0.0.1',
packages=[
'regrowl',
'regrowl.bridge',
'regrowl.extras',
],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=[
'gntp',
],
entry_points={
'console_scripts': [
'regrowl = regrowl.cli:main'
],
'regrowl.bridge': [
'echo = regrowl.bridge.echo:EchoNotifier',
'local = regrowl.bridge.local:LocalNotifier',
'subscribe = regrowl.bridge.subscribe:SubscribelNotifier',
'udp = regrowl.bridge.udp:UDPNotifier',
]
}
)
|
from setuptools import setup
setup(
name='regrowl',
description='Regrowl server',
author='Paul Traylor',
url='https://github.com/kfdm/gntp-regrowl',
version='0.0.1',
packages=[
'regrowl',
'regrowl.bridge',
'regrowl.extras',
],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=[
'gntp',
],
entry_points={
'console_scripts': [
'regrowl = regrowl.cli:main'
],
'regrowl.bridge': [
'echo = regrowl.bridge.echo:EchoNotifier',
'forward = regrowl.bridge.forward:ForwardNotifier',
'local = regrowl.bridge.local:LocalNotifier',
'subscribe = regrowl.bridge.subscribe:SubscribelNotifier',
'udp = regrowl.bridge.udp:UDPNotifier',
]
}
)
|
Add forward to list of bridges
|
Add forward to list of bridges
|
Python
|
mit
|
kfdm/gntp-regrowl
|
python
|
## Code Before:
from setuptools import setup
setup(
name='regrowl',
description='Regrowl server',
author='Paul Traylor',
url='https://github.com/kfdm/gntp-regrowl',
version='0.0.1',
packages=[
'regrowl',
'regrowl.bridge',
'regrowl.extras',
],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=[
'gntp',
],
entry_points={
'console_scripts': [
'regrowl = regrowl.cli:main'
],
'regrowl.bridge': [
'echo = regrowl.bridge.echo:EchoNotifier',
'local = regrowl.bridge.local:LocalNotifier',
'subscribe = regrowl.bridge.subscribe:SubscribelNotifier',
'udp = regrowl.bridge.udp:UDPNotifier',
]
}
)
## Instruction:
Add forward to list of bridges
## Code After:
from setuptools import setup
setup(
name='regrowl',
description='Regrowl server',
author='Paul Traylor',
url='https://github.com/kfdm/gntp-regrowl',
version='0.0.1',
packages=[
'regrowl',
'regrowl.bridge',
'regrowl.extras',
],
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=[
'gntp',
],
entry_points={
'console_scripts': [
'regrowl = regrowl.cli:main'
],
'regrowl.bridge': [
'echo = regrowl.bridge.echo:EchoNotifier',
'forward = regrowl.bridge.forward:ForwardNotifier',
'local = regrowl.bridge.local:LocalNotifier',
'subscribe = regrowl.bridge.subscribe:SubscribelNotifier',
'udp = regrowl.bridge.udp:UDPNotifier',
]
}
)
|
...
],
'regrowl.bridge': [
'echo = regrowl.bridge.echo:EchoNotifier',
'forward = regrowl.bridge.forward:ForwardNotifier',
'local = regrowl.bridge.local:LocalNotifier',
'subscribe = regrowl.bridge.subscribe:SubscribelNotifier',
'udp = regrowl.bridge.udp:UDPNotifier',
...
|
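The regrowl.bridge entries registered above are setuptools entry points, so they can be discovered by group name at runtime. A rough sketch of such a loader (not regrowl's actual code; entry_points(group=...) requires Python 3.10+ or the importlib_metadata backport):
from importlib.metadata import entry_points
def load_bridges(group='regrowl.bridge'):
    # Map each registered name (echo, forward, local, ...) to its notifier class.
    return {ep.name: ep.load() for ep in entry_points(group=group)}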
5d65b35623d2dbdb518a6e4a7f95ec224bf879a1
|
ros_start/scritps/service_client.py
|
ros_start/scritps/service_client.py
|
import rospy
from std_srvs.srv import Empty
def service_client():
rospy.loginfo('waiting service')
rospy.wait_for_service('call_me')
try:
service = rospy.ServiceProxy('call_me', Empty)
response = service()
except rospy.ServiceException, e:
print "Service call failed: %s" % e
if __name__ == "__main__":
service_client()
|
import rospy
from std_srvs.srv import Empty
def call_service():
rospy.loginfo('waiting service')
rospy.wait_for_service('call_me')
try:
service = rospy.ServiceProxy('call_me', Empty)
response = service()
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def service_client():
rospy.init_node('service_client')
call_service()
rospy.spin()
if __name__ == "__main__":
service_client()
|
Add initialization of the service client node.
|
Add initialization of the service client node.
|
Python
|
bsd-2-clause
|
OTL/ros_book_programs,OTL/ros_book_programs
|
python
|
## Code Before:
import rospy
from std_srvs.srv import Empty
def service_client():
rospy.loginfo('waiting service')
rospy.wait_for_service('call_me')
try:
service = rospy.ServiceProxy('call_me', Empty)
response = service()
except rospy.ServiceException, e:
print "Service call failed: %s" % e
if __name__ == "__main__":
service_client()
## Instruction:
Add initialization of the service client node.
## Code After:
import rospy
from std_srvs.srv import Empty
def call_service():
rospy.loginfo('waiting service')
rospy.wait_for_service('call_me')
try:
service = rospy.ServiceProxy('call_me', Empty)
response = service()
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def service_client():
rospy.init_node('service_client')
call_service()
rospy.spin()
if __name__ == "__main__":
service_client()
|
# ... existing code ...
import rospy
from std_srvs.srv import Empty
def call_service():
rospy.loginfo('waiting service')
rospy.wait_for_service('call_me')
try:
service = rospy.ServiceProxy('call_me', Empty)
response = service()
# ... modified code ...
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def service_client():
rospy.init_node('service_client')
call_service()
rospy.spin()
if __name__ == "__main__":
service_client()
# ... rest of the code ...
|
a52bd5acd50d37314247e4ffaed501ba08e0eca3
|
tests/test_simple_model.py
|
tests/test_simple_model.py
|
"""Tests for creating a simple tight-binding model."""
import pytest
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t1', T_VALUES)
@pytest.mark.parametrize('k', KPT)
def test_simple(t1, get_model, k, compare_data, models_equal, compare_isclose):
"""Regression test for a simple manually created tight-binding model."""
model = get_model(*t1)
compare_isclose(model.hamilton(k), tag='hamilton')
compare_isclose(model.eigenval(k), tag='eigenval')
compare_data(models_equal, model)
@pytest.mark.parametrize('t1', T_VALUES)
def test_invalid_dim(t1, get_model):
"""
Check that an error is raised when the dimension does not match
the hopping matrix keys.
"""
with pytest.raises(ValueError):
get_model(*t1, dim=2)
|
"""Tests for creating a simple tight-binding model."""
import pytest
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t1', T_VALUES)
@pytest.mark.parametrize('k', KPT)
def test_simple(t1, get_model, k, compare_data, models_equal, compare_isclose):
"""Regression test for a simple manually created tight-binding model."""
model = get_model(*t1)
compare_isclose(model.hamilton(k), tag='hamilton')
compare_isclose(model.eigenval(k), tag='eigenval')
compare_data(models_equal, model)
def test_invalid_dim(get_model):
"""
Check that an error is raised when the reciprocal lattice vector
does not match the dimension.
"""
model = get_model(0.1, 0.2)
model.add_hop(1j, 0, 1, (0, 1, 2))
with pytest.raises(ValueError):
model.add_hop(1j, 0, 1, (0, 1))
|
Fix test broken by previous commit.
|
Fix test broken by previous commit.
|
Python
|
apache-2.0
|
Z2PackDev/TBmodels,Z2PackDev/TBmodels
|
python
|
## Code Before:
"""Tests for creating a simple tight-binding model."""
import pytest
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t1', T_VALUES)
@pytest.mark.parametrize('k', KPT)
def test_simple(t1, get_model, k, compare_data, models_equal, compare_isclose):
"""Regression test for a simple manually created tight-binding model."""
model = get_model(*t1)
compare_isclose(model.hamilton(k), tag='hamilton')
compare_isclose(model.eigenval(k), tag='eigenval')
compare_data(models_equal, model)
@pytest.mark.parametrize('t1', T_VALUES)
def test_invalid_dim(t1, get_model):
"""
Check that an error is raised when the dimension does not match
the hopping matrix keys.
"""
with pytest.raises(ValueError):
get_model(*t1, dim=2)
## Instruction:
Fix test broken by previous commit.
## Code After:
"""Tests for creating a simple tight-binding model."""
import pytest
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t1', T_VALUES)
@pytest.mark.parametrize('k', KPT)
def test_simple(t1, get_model, k, compare_data, models_equal, compare_isclose):
"""Regression test for a simple manually created tight-binding model."""
model = get_model(*t1)
compare_isclose(model.hamilton(k), tag='hamilton')
compare_isclose(model.eigenval(k), tag='eigenval')
compare_data(models_equal, model)
def test_invalid_dim(get_model):
"""
Check that an error is raised when the reciprocal lattice vector
does not match the dimension.
"""
model = get_model(0.1, 0.2)
model.add_hop(1j, 0, 1, (0, 1, 2))
with pytest.raises(ValueError):
model.add_hop(1j, 0, 1, (0, 1))
|
...
compare_data(models_equal, model)
def test_invalid_dim(get_model):
"""
Check that an error is raised when the reciprocal lattice vector
does not match the dimension.
"""
model = get_model(0.1, 0.2)
model.add_hop(1j, 0, 1, (0, 1, 2))
with pytest.raises(ValueError):
model.add_hop(1j, 0, 1, (0, 1))
...
|
51d5be75475f3d03445f02b34dbaf9b5a31174c5
|
src/host/os_isfile.c
|
src/host/os_isfile.c
|
/**
* \file os_isfile.c
* \brief Returns true if the given file exists on the file system.
* \author Copyright (c) 2002-2008 Jason Perkins and the Premake project
*/
#include <sys/stat.h>
#include "premake.h"
int os_isfile(lua_State* L)
{
const char* filename = luaL_checkstring(L, 1);
lua_pushboolean(L, do_isfile(filename));
return 1;
}
int do_isfile(const char* filename)
{
struct stat buf;
#if PLATFORM_WINDOWS
DWORD attrib = GetFileAttributesA(filename);
if (attrib != INVALID_FILE_ATTRIBUTES)
{
return (attrib & FILE_ATTRIBUTE_DIRECTORY) == 0;
}
#else
if (stat(filename, &buf) == 0)
{
return ((buf.st_mode & S_IFDIR) == 0);
}
#endif
return 0;
}
|
/**
* \file os_isfile.c
* \brief Returns true if the given file exists on the file system.
* \author Copyright (c) 2002-2008 Jason Perkins and the Premake project
*/
#include <sys/stat.h>
#include "premake.h"
int os_isfile(lua_State* L)
{
const char* filename = luaL_checkstring(L, 1);
lua_pushboolean(L, do_isfile(filename));
return 1;
}
int do_isfile(const char* filename)
{
#if PLATFORM_WINDOWS
DWORD attrib = GetFileAttributesA(filename);
if (attrib != INVALID_FILE_ATTRIBUTES)
{
return (attrib & FILE_ATTRIBUTE_DIRECTORY) == 0;
}
#else
struct stat buf;
if (stat(filename, &buf) == 0)
{
return ((buf.st_mode & S_IFDIR) == 0);
}
#endif
return 0;
}
|
Fix a new Visual Studio unused variable build warning
|
Fix a new Visual Studio unused variable build warning
|
C
|
bsd-3-clause
|
LORgames/premake-core,CodeAnxiety/premake-core,bravnsgaard/premake-core,dcourtois/premake-core,CodeAnxiety/premake-core,sleepingwit/premake-core,premake/premake-core,resetnow/premake-core,lizh06/premake-core,TurkeyMan/premake-core,premake/premake-core,Blizzard/premake-core,CodeAnxiety/premake-core,Zefiros-Software/premake-core,aleksijuvani/premake-core,tvandijck/premake-core,mandersan/premake-core,mandersan/premake-core,noresources/premake-core,aleksijuvani/premake-core,aleksijuvani/premake-core,starkos/premake-core,jstewart-amd/premake-core,LORgames/premake-core,mandersan/premake-core,xriss/premake-core,TurkeyMan/premake-core,CodeAnxiety/premake-core,starkos/premake-core,sleepingwit/premake-core,mandersan/premake-core,lizh06/premake-core,noresources/premake-core,tvandijck/premake-core,mendsley/premake-core,jstewart-amd/premake-core,premake/premake-core,soundsrc/premake-core,xriss/premake-core,premake/premake-core,mendsley/premake-core,mendsley/premake-core,dcourtois/premake-core,starkos/premake-core,lizh06/premake-core,dcourtois/premake-core,noresources/premake-core,bravnsgaard/premake-core,dcourtois/premake-core,Blizzard/premake-core,noresources/premake-core,noresources/premake-core,soundsrc/premake-core,resetnow/premake-core,sleepingwit/premake-core,martin-traverse/premake-core,soundsrc/premake-core,mendsley/premake-core,tvandijck/premake-core,CodeAnxiety/premake-core,bravnsgaard/premake-core,bravnsgaard/premake-core,xriss/premake-core,dcourtois/premake-core,starkos/premake-core,TurkeyMan/premake-core,aleksijuvani/premake-core,dcourtois/premake-core,aleksijuvani/premake-core,soundsrc/premake-core,dcourtois/premake-core,Blizzard/premake-core,soundsrc/premake-core,starkos/premake-core,Blizzard/premake-core,Zefiros-Software/premake-core,jstewart-amd/premake-core,resetnow/premake-core,TurkeyMan/premake-core,starkos/premake-core,resetnow/premake-core,tvandijck/premake-core,premake/premake-core,martin-traverse/premake-core,martin-traverse/premake-core,resetnow/premake-core,sleepingwit/premake-core,sleepingwit/premake-core,Zefiros-Software/premake-core,xriss/premake-core,lizh06/premake-core,bravnsgaard/premake-core,Blizzard/premake-core,premake/premake-core,jstewart-amd/premake-core,martin-traverse/premake-core,mendsley/premake-core,LORgames/premake-core,Zefiros-Software/premake-core,premake/premake-core,jstewart-amd/premake-core,xriss/premake-core,noresources/premake-core,LORgames/premake-core,Blizzard/premake-core,LORgames/premake-core,tvandijck/premake-core,TurkeyMan/premake-core,mandersan/premake-core,noresources/premake-core,starkos/premake-core,Zefiros-Software/premake-core
|
c
|
## Code Before:
/**
* \file os_isfile.c
* \brief Returns true if the given file exists on the file system.
* \author Copyright (c) 2002-2008 Jason Perkins and the Premake project
*/
#include <sys/stat.h>
#include "premake.h"
int os_isfile(lua_State* L)
{
const char* filename = luaL_checkstring(L, 1);
lua_pushboolean(L, do_isfile(filename));
return 1;
}
int do_isfile(const char* filename)
{
struct stat buf;
#if PLATFORM_WINDOWS
DWORD attrib = GetFileAttributesA(filename);
if (attrib != INVALID_FILE_ATTRIBUTES)
{
return (attrib & FILE_ATTRIBUTE_DIRECTORY) == 0;
}
#else
if (stat(filename, &buf) == 0)
{
return ((buf.st_mode & S_IFDIR) == 0);
}
#endif
return 0;
}
## Instruction:
Fix a new Visual Studio unused variable build warning
## Code After:
/**
* \file os_isfile.c
* \brief Returns true if the given file exists on the file system.
* \author Copyright (c) 2002-2008 Jason Perkins and the Premake project
*/
#include <sys/stat.h>
#include "premake.h"
int os_isfile(lua_State* L)
{
const char* filename = luaL_checkstring(L, 1);
lua_pushboolean(L, do_isfile(filename));
return 1;
}
int do_isfile(const char* filename)
{
#if PLATFORM_WINDOWS
DWORD attrib = GetFileAttributesA(filename);
if (attrib != INVALID_FILE_ATTRIBUTES)
{
return (attrib & FILE_ATTRIBUTE_DIRECTORY) == 0;
}
#else
struct stat buf;
if (stat(filename, &buf) == 0)
{
return ((buf.st_mode & S_IFDIR) == 0);
}
#endif
return 0;
}
|
// ... existing code ...
int do_isfile(const char* filename)
{
#if PLATFORM_WINDOWS
DWORD attrib = GetFileAttributesA(filename);
if (attrib != INVALID_FILE_ATTRIBUTES)
// ... modified code ...
return (attrib & FILE_ATTRIBUTE_DIRECTORY) == 0;
}
#else
struct stat buf;
if (stat(filename, &buf) == 0)
{
return ((buf.st_mode & S_IFDIR) == 0);
// ... rest of the code ...
|
2672954aebf325f7c73f0539fd5e6aa53f4bb1e9
|
Parse-RACExtensions/PFQuery+RACExtensions.h
|
Parse-RACExtensions/PFQuery+RACExtensions.h
|
//
// PFQuery+RACExtensions.h
// Parse-RACExtensions
//
// Created by Dave Lee on 2013-06-28.
// Copyright (c) 2013 Dave Lee. All rights reserved.
//
#import <Parse/PFQuery.h>
@class RACSignal;
@interface PFQuery (RACExtensions)
/// Gets a PFObject with the given id.
///
/// @warning This mutates the PFQuery.
///
/// @see -getObjectInBackgroundWithId:block:
///
/// @return A signal that sends the identified PFObject.
- (RACSignal *)rac_getObjectWithId:(NSString *)objectId;
/// Finds objects based on the constructed query.
///
/// @see -findObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the NSArray of matching PFObjects.
- (RACSignal *)rac_findObjects;
/// Gets an object based on the constructed query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getFirstObjectInBackgroundWithBlock:
///
/// @return A signal that sends the first matching PFObject.
- (RACSignal *)rac_getFirstObject;
/// Counts objects based on the constructed query.
///
/// @see -countObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the integer count of matching PFObjects
- (RACSignal *)rac_countObjects;
@end
|
//
// PFQuery+RACExtensions.h
// Parse-RACExtensions
//
// Created by Dave Lee on 2013-06-28.
// Copyright (c) 2013 Dave Lee. All rights reserved.
//
#import <Parse/PFQuery.h>
@class RACSignal;
@interface PFQuery (RACExtensions)
/// Gets a PFObject with the given id.
///
/// Disposing subscription will also cancel the query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getObjectInBackgroundWithId:block:
///
/// @return A signal that sends the identified PFObject.
- (RACSignal *)rac_getObjectWithId:(NSString *)objectId;
/// Finds objects based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @see -findObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the NSArray of matching PFObjects.
- (RACSignal *)rac_findObjects;
/// Gets an object based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getFirstObjectInBackgroundWithBlock:
///
/// @return A signal that sends the first matching PFObject.
- (RACSignal *)rac_getFirstObject;
/// Counts objects based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @see -countObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the integer count of matching PFObjects
- (RACSignal *)rac_countObjects;
@end
|
Document PFQuery disposal side effects
|
Document PFQuery disposal side effects
|
C
|
mit
|
kastiglione/Parse-RACExtensions,chrrasmussen/Parse-RACExtensions,kastiglione/Parse-RACExtensions,chrrasmussen/Parse-RACExtensions
|
c
|
## Code Before:
//
// PFQuery+RACExtensions.h
// Parse-RACExtensions
//
// Created by Dave Lee on 2013-06-28.
// Copyright (c) 2013 Dave Lee. All rights reserved.
//
#import <Parse/PFQuery.h>
@class RACSignal;
@interface PFQuery (RACExtensions)
/// Gets a PFObject with the given id.
///
/// @warning This mutates the PFQuery.
///
/// @see -getObjectInBackgroundWithId:block:
///
/// @return A signal that sends the identified PFObject.
- (RACSignal *)rac_getObjectWithId:(NSString *)objectId;
/// Finds objects based on the constructed query.
///
/// @see -findObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the NSArray of matching PFObjects.
- (RACSignal *)rac_findObjects;
/// Gets an object based on the constructed query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getFirstObjectInBackgroundWithBlock:
///
/// @return A signal that sends the first matching PFObject.
- (RACSignal *)rac_getFirstObject;
/// Counts objects based on the constructed query.
///
/// @see -countObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the integer count of matching PFObjects
- (RACSignal *)rac_countObjects;
@end
## Instruction:
Document PFQuery disposal side effects
## Code After:
//
// PFQuery+RACExtensions.h
// Parse-RACExtensions
//
// Created by Dave Lee on 2013-06-28.
// Copyright (c) 2013 Dave Lee. All rights reserved.
//
#import <Parse/PFQuery.h>
@class RACSignal;
@interface PFQuery (RACExtensions)
/// Gets a PFObject with the given id.
///
/// Disposing subscription will also cancel the query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getObjectInBackgroundWithId:block:
///
/// @return A signal that sends the identified PFObject.
- (RACSignal *)rac_getObjectWithId:(NSString *)objectId;
/// Finds objects based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @see -findObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the NSArray of matching PFObjects.
- (RACSignal *)rac_findObjects;
/// Gets an object based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getFirstObjectInBackgroundWithBlock:
///
/// @return A signal that sends the first matching PFObject.
- (RACSignal *)rac_getFirstObject;
/// Counts objects based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @see -countObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the integer count of matching PFObjects
- (RACSignal *)rac_countObjects;
@end
|
# ... existing code ...
/// Gets a PFObject with the given id.
///
/// Disposing subscription will also cancel the query.
///
/// @warning This mutates the PFQuery.
///
/// @see -getObjectInBackgroundWithId:block:
# ... modified code ...
/// Finds objects based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @see -findObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the NSArray of matching PFObjects.
...
- (RACSignal *)rac_findObjects;
/// Gets an object based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @warning This mutates the PFQuery.
///
...
/// Counts objects based on the constructed query.
///
/// Disposing subscription will also cancel the query.
///
/// @see -countObjectsInBackgroundWithBlock:
///
/// @return A signal that sends the integer count of matching PFObjects
# ... rest of the code ...
|
e778a2da7938dcf565282635e395dc410ef989d6
|
terraform-gce/worker/generate-certs.py
|
terraform-gce/worker/generate-certs.py
|
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
|
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
|
Copy cert auth from master
|
Copy cert auth from master
|
Python
|
apache-2.0
|
aknuds1/contrib,aknuds1/contrib,aknuds1/contrib,aknuds1/contrib,aknuds1/contrib,aknuds1/contrib
|
python
|
## Code Before:
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
## Instruction:
Copy cert auth from master
## Code After:
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
|
// ... existing code ...
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
// ... rest of the code ...
|
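The copy step above presumes that the master CA material has already been generated. A minimal guard along these lines would make that assumption explicit; it is a sketch, not part of the recorded script, and it only reuses the relative paths shown above:

import os.path
import sys

ca_sources = [
    '../../../master/assets/certificates/ca.pem',
    '../../../master/assets/certificates/ca-key.pem',
]
missing = [path for path in ca_sources if not os.path.isfile(path)]
if missing:
    # Fail early with a readable message instead of letting shutil.copy2 raise.
    sys.exit('Missing CA material from master: ' + ', '.join(missing))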
50278a29e2bfbf316845d313abe20a9c12909ef0
|
commandline/src/main/java/org/musetest/commandline/Launcher.java
|
commandline/src/main/java/org/musetest/commandline/Launcher.java
|
package org.musetest.commandline;
import io.airlift.airline.*;
import org.musetest.core.commandline.*;
import org.reflections.*;
import javax.imageio.spi.*;
import java.util.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public class Launcher
{
@SuppressWarnings("unchecked")
public static void main(String[] args)
{
// dynamically lookup the commands using Java's ServiceRegistry. This looks at the META-INF/service files in jars on the classpath.
Iterator<MuseCommand> commands = ServiceRegistry.lookupProviders(MuseCommand.class);
List<Class<? extends Runnable>> implementors = new ArrayList<>();
while (commands.hasNext())
implementors.add((commands.next().getClass()));
Cli.CliBuilder<Runnable> builder = Cli.<Runnable>builder("muse")
.withDescription("Muse command-line tools")
.withDefaultCommand(Help.class)
.withCommands(Help.class)
.withCommands(implementors);
Cli<Runnable> muse_parser = builder.build();
try
{
muse_parser.parse(args).run();
}
catch (Exception e)
{
muse_parser.parse(new String[0]).run();
}
}
static
{
Reflections.log = null;
}
}
|
package org.musetest.commandline;
import io.airlift.airline.*;
import org.musetest.core.commandline.*;
import org.reflections.*;
import javax.imageio.spi.*;
import java.util.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public class Launcher
{
@SuppressWarnings("unchecked")
public static void main(String[] args)
{
// dynamically lookup the commands using Java's ServiceRegistry. This looks at the META-INF/service files in jars on the classpath.
Iterator<MuseCommand> commands = ServiceRegistry.lookupProviders(MuseCommand.class);
List<Class<? extends Runnable>> implementors = new ArrayList<>();
while (commands.hasNext())
implementors.add((commands.next().getClass()));
Cli.CliBuilder<Runnable> builder = Cli.<Runnable>builder("muse")
.withDescription("Muse command-line tools")
.withDefaultCommand(Help.class)
.withCommands(Help.class)
.withCommands(implementors);
Cli<Runnable> muse_parser = builder.build();
final Runnable command;
try
{
command = muse_parser.parse(args);
}
catch (Exception e)
{
muse_parser.parse(new String[0]).run();
return;
}
try
{
command.run();
}
catch (Exception e)
{
System.out.println(String.format("Command failed due to a %s.\n%s", e.getClass().getSimpleName(), e.getMessage()));
e.printStackTrace(System.err);
}
}
static
{
Reflections.log = null;
}
}
|
Handle command-line parsing errors separately from command execution errors.
|
Handle command-line parsing errors separately from command execution errors.
|
Java
|
apache-2.0
|
ChrisLMerrill/muse,ChrisLMerrill/muse
|
java
|
## Code Before:
package org.musetest.commandline;
import io.airlift.airline.*;
import org.musetest.core.commandline.*;
import org.reflections.*;
import javax.imageio.spi.*;
import java.util.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public class Launcher
{
@SuppressWarnings("unchecked")
public static void main(String[] args)
{
// dynamically lookup the commands using Java's ServiceRegistry. This looks at the META-INF/service files in jars on the classpath.
Iterator<MuseCommand> commands = ServiceRegistry.lookupProviders(MuseCommand.class);
List<Class<? extends Runnable>> implementors = new ArrayList<>();
while (commands.hasNext())
implementors.add((commands.next().getClass()));
Cli.CliBuilder<Runnable> builder = Cli.<Runnable>builder("muse")
.withDescription("Muse command-line tools")
.withDefaultCommand(Help.class)
.withCommands(Help.class)
.withCommands(implementors);
Cli<Runnable> muse_parser = builder.build();
try
{
muse_parser.parse(args).run();
}
catch (Exception e)
{
muse_parser.parse(new String[0]).run();
}
}
static
{
Reflections.log = null;
}
}
## Instruction:
Handle command-line parsing errors separately from command execution errors.
## Code After:
package org.musetest.commandline;
import io.airlift.airline.*;
import org.musetest.core.commandline.*;
import org.reflections.*;
import javax.imageio.spi.*;
import java.util.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public class Launcher
{
@SuppressWarnings("unchecked")
public static void main(String[] args)
{
// dynamically lookup the commands using Java's ServiceRegistry. This looks at the META-INF/service files in jars on the classpath.
Iterator<MuseCommand> commands = ServiceRegistry.lookupProviders(MuseCommand.class);
List<Class<? extends Runnable>> implementors = new ArrayList<>();
while (commands.hasNext())
implementors.add((commands.next().getClass()));
Cli.CliBuilder<Runnable> builder = Cli.<Runnable>builder("muse")
.withDescription("Muse command-line tools")
.withDefaultCommand(Help.class)
.withCommands(Help.class)
.withCommands(implementors);
Cli<Runnable> muse_parser = builder.build();
final Runnable command;
try
{
command = muse_parser.parse(args);
}
catch (Exception e)
{
muse_parser.parse(new String[0]).run();
return;
}
try
{
command.run();
}
catch (Exception e)
{
System.out.println(String.format("Command failed due to a %s.\n%s", e.getClass().getSimpleName(), e.getMessage()));
e.printStackTrace(System.err);
}
}
static
{
Reflections.log = null;
}
}
|
...
.withCommands(implementors);
Cli<Runnable> muse_parser = builder.build();
final Runnable command;
try
{
command = muse_parser.parse(args);
}
catch (Exception e)
{
muse_parser.parse(new String[0]).run();
return;
}
try
{
command.run();
}
catch (Exception e)
{
System.out.println(String.format("Command failed due to a %s.\n%s", e.getClass().getSimpleName(), e.getMessage()));
e.printStackTrace(System.err);
}
}
static
...
|
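For readers tracking the control flow rather than the Java details, this is a rough Python sketch of the same separation between parse-time and run-time failures; all names are placeholders and nothing here belongs to the project itself:

def launch(argv, parser):
    try:
        command = parser.parse(argv)
    except Exception:
        # Parsing failed: fall back to the default help command and stop.
        parser.parse([]).run()
        return
    try:
        command.run()
    except Exception as exc:
        # Execution failed: report it instead of masking it as a usage error.
        print('Command failed due to a %s.\n%s' % (type(exc).__name__, exc))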
77bda2ce09b8d397a25c4bff83268aa3d8ec187b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="comt",
version="2.6.0",
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
long_description=open('ABOUT.rst').read(),
author='Abilian SAS',
author_email='[email protected]',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['setuptools'],
zip_safe=False,
)
|
from setuptools import setup, find_packages
setup(
name="comt",
use_scm_version=True,
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
long_description=open('ABOUT.rst').read(),
author='Abilian SAS',
author_email='[email protected]',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['setuptools'],
zip_safe=False,
)
|
Use git tag for package version.
|
Use git tag for package version.
|
Python
|
agpl-3.0
|
co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name="comt",
version="2.6.0",
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
long_description=open('ABOUT.rst').read(),
author='Abilian SAS',
author_email='[email protected]',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['setuptools'],
zip_safe=False,
)
## Instruction:
Use git tag for package version.
## Code After:
from setuptools import setup, find_packages
setup(
name="comt",
use_scm_version=True,
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
long_description=open('ABOUT.rst').read(),
author='Abilian SAS',
author_email='[email protected]',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['setuptools'],
zip_safe=False,
)
|
# ... existing code ...
setup(
name="comt",
use_scm_version=True,
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
# ... rest of the code ...
|
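A brief sketch of how use_scm_version is typically wired up; the setup_requires entry is an assumption on my part and does not appear in the recorded diff:

from setuptools import setup, find_packages

setup(
    name='comt',
    use_scm_version=True,                 # version derived from the latest git tag
    setup_requires=['setuptools_scm'],    # assumed prerequisite for use_scm_version
    packages=find_packages('src'),
    package_dir={'': 'src'},
)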
19af60831293ccff759e7ad9afb2336d1e232b02
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
Add i3pystatus.weather to packages list
|
Add i3pystatus.weather to packages list
|
Python
|
mit
|
asmikhailov/i3pystatus,drwahl/i3pystatus,enkore/i3pystatus,facetoe/i3pystatus,teto/i3pystatus,m45t3r/i3pystatus,Arvedui/i3pystatus,fmarchenko/i3pystatus,eBrnd/i3pystatus,yang-ling/i3pystatus,schroeji/i3pystatus,yang-ling/i3pystatus,ncoop/i3pystatus,ncoop/i3pystatus,asmikhailov/i3pystatus,fmarchenko/i3pystatus,enkore/i3pystatus,schroeji/i3pystatus,teto/i3pystatus,richese/i3pystatus,m45t3r/i3pystatus,facetoe/i3pystatus,richese/i3pystatus,drwahl/i3pystatus,Arvedui/i3pystatus,eBrnd/i3pystatus
|
python
|
## Code Before:
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
## Instruction:
Add i3pystatus.weather to packages list
## Code After:
from setuptools import setup
setup(name="i3pystatus",
version="3.34",
description="A complete replacement for i3status",
url="http://github.com/enkore/i3pystatus",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Topic :: Desktop Environment :: Window Managers",
],
packages=[
"i3pystatus",
"i3pystatus.core",
"i3pystatus.tools",
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
"i3pystatus = i3pystatus:main",
"i3pystatus-setting-util = i3pystatus.tools.setting_util:main"
]
},
zip_safe=True,
)
|
# ... existing code ...
"i3pystatus.mail",
"i3pystatus.pulseaudio",
"i3pystatus.updates",
"i3pystatus.weather",
],
entry_points={
"console_scripts": [
# ... rest of the code ...
|
dd7513f4146679d11aff6d528f11927131dc692f
|
feder/monitorings/factories.py
|
feder/monitorings/factories.py
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
Add description and template to MonitoringFactory
|
Add description and template to MonitoringFactory
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
python
|
## Code Before:
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
## Instruction:
Add description and template to MonitoringFactory
## Code After:
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
...
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
...
|
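A hypothetical test showing what the new factory fields produce; the assertions follow from the Sequence definitions above, and the db fixture assumes pytest-django is available in the test environment:

from feder.monitorings.factories import MonitoringFactory

def test_monitoring_factory_defaults(db):
    monitoring = MonitoringFactory()
    assert monitoring.description.startswith('description no.')
    assert '{{EMAIL}}' in monitoring.template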
03eec7510c82387c34f7d4bf0646727b0931b7b5
|
kotlin/mapFilterReduce.kt
|
kotlin/mapFilterReduce.kt
|
// Map, Filter, Reduce
fun main(args:Array<String>){
var result =
// 1. Generating sequence
Array(1000000, {i->i})
// 2. Mapping the sequence into another
.map { n -> n * 2}
// 3. Filtering the sequence
.filter { n -> n%3 == 0}
// 4. Reducing the sequence
.fold(0L) { x, y -> x.toLong() + y.toLong()}
// As a result
println(result.toString())
}
|
// Map, Filter, Reduce
// To increase VM heap size, set environment variable JAVA_OPTS
// (For example: 'export JAVA_OPTS="$JAVA_OPTS -Xmx1024M"')
fun main(args:Array<String>){
var start = System.currentTimeMillis()
var result =
// 1. Generating sequence
Array(10000000, {i->i})
// 2. Mapping the sequence into another
.map { n -> n * 2}
// 3. Filtering the sequence
.filter { n -> n%3 == 0}
// 4. Reducing the sequence
.fold(0L) { x, y -> x.toLong() + y.toLong()}
var end = System.currentTimeMillis()
// As a result
println(result.toString())
println((end-start).toString() + " [msec]")
}
|
Add timer to evaluate performance
|
Add timer to evaluate performance
|
Kotlin
|
mit
|
kuroyakov/minimum-examples,kuroyakov/minimum-examples,kuroyakov/minimum-examples,kuroyakov/minimum-examples,kuroyakov/minimum-examples,kuroyakov/minimum-examples
|
kotlin
|
## Code Before:
// Map, Filter, Reduce
fun main(args:Array<String>){
var result =
// 1. Generating sequence
Array(1000000, {i->i})
// 2. Mapping the sequence into another
.map { n -> n * 2}
// 3. Filtering the sequence
.filter { n -> n%3 == 0}
// 4. Reducing the sequence
.fold(0L) { x, y -> x.toLong() + y.toLong()}
// As a result
println(result.toString())
}
## Instruction:
Add timer to evaluate performance
## Code After:
// Map, Filter, Reduce
// To increase VM heap size, set environment variable JAVA_OPTS
// (For example: 'export JAVA_OPTS="$JAVA_OPTS -Xmx1024M"')
fun main(args:Array<String>){
var start = System.currentTimeMillis()
var result =
// 1. Generating sequence
Array(10000000, {i->i})
// 2. Mapping the sequence into another
.map { n -> n * 2}
// 3. Filtering the sequence
.filter { n -> n%3 == 0}
// 4. Reducing the sequence
.fold(0L) { x, y -> x.toLong() + y.toLong()}
var end = System.currentTimeMillis()
// As a result
println(result.toString())
println((end-start).toString() + " [msec]")
}
|
// ... existing code ...
// Map, Filter, Reduce
// To increase VM heap size, set environment variable JAVA_OPTS
// (For example: 'export JAVA_OPTS="$JAVA_OPTS -Xmx1024M"')
fun main(args:Array<String>){
var start = System.currentTimeMillis()
var result =
// 1. Generating sequence
Array(10000000, {i->i})
// 2. Mapping the sequence into another
.map { n -> n * 2}
// ... modified code ...
// 4. Reducing the sequence
.fold(0L) { x, y -> x.toLong() + y.toLong()}
var end = System.currentTimeMillis()
// As a result
println(result.toString())
println((end-start).toString() + " [msec]")
}
// ... rest of the code ...
|
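An illustrative Python analogue of the timed map/filter/fold pipeline, using the same element count; absolute timings will differ between the JVM and CPython, so this only mirrors the shape of the harness:

import time

start = time.time()
result = sum(n for n in (i * 2 for i in range(10000000)) if n % 3 == 0)
end = time.time()
print(result)
print('%d [msec]' % ((end - start) * 1000))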
fd5cad381e8b821bfabbefc9deb4b8a4531844f6
|
rnacentral_pipeline/rnacentral/notify/slack.py
|
rnacentral_pipeline/rnacentral/notify/slack.py
|
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
raise SystemExit("SLACK_WEBHOOK environment variable not defined")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
|
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
try:
from rnacentral_pipeline.secrets import SLACK_WEBHOOK
except:
raise SystemExit("SLACK_WEBHOOK environment variable not defined, and couldn't find a secrets file")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
|
Add a secrets file in rnac notify
|
Add a secrets file in rnac notify
Nextflow doesn't propagate environment variables from the profile into
the event handler closures. This is the simplest workaround for that.
secrets.py should be on the cluster and symlinked into
rnacentral_pipeline
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
python
|
## Code Before:
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
raise SystemExit("SLACK_WEBHOOK environment variable not defined")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
## Instruction:
Add a secrets file in rnac notify
Nextflow doesn't propagate environment variables from the profile into
the event handler closures. This is the simplest workaround for that.
secrets.py should be on the cluster and symlinked into
rnacentral_pipeline
## Code After:
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
try:
from rnacentral_pipeline.secrets import SLACK_WEBHOOK
except:
raise SystemExit("SLACK_WEBHOOK environment variable not defined, and couldn't find a secrets file")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
|
# ... existing code ...
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
try:
from rnacentral_pipeline.secrets import SLACK_WEBHOOK
except:
raise SystemExit("SLACK_WEBHOOK environment variable not defined, and couldn't find a secrets file")
if plain:
slack_json = {
# ... rest of the code ...
|
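The fallback import implies a module like the one sketched below living inside rnacentral_pipeline (the message notes it is kept on the cluster and symlinked in); the URL is a placeholder, not a real webhook:

# rnacentral_pipeline/secrets.py (placeholder value only)
SLACK_WEBHOOK = 'https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXX'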
4b2d1f424a846ed3762100ed69ed3ff57b20e670
|
Code/src/main/java/nl/utwente/viskell/ui/components/SliderBlock.java
|
Code/src/main/java/nl/utwente/viskell/ui/components/SliderBlock.java
|
package nl.utwente.viskell.ui.components;
import javafx.fxml.FXML;
import javafx.scene.control.Slider;
import nl.utwente.viskell.ui.CustomUIPane;
/**
* An extension of ValueBlock.
* The value of this Block can be changed by dragging a slider.
* Ranges from 0 to 1 (both inclusive).
*/
public class SliderBlock extends ValueBlock {
@FXML protected Slider slider;
/**
* Constructs a new SliderBlock
* @param pane The parent pane this Block resides on.
*/
public SliderBlock(CustomUIPane pane) {
super(pane, pane.getEnvInstance().buildType("Fractional a => a"), "0.0", "SliderBlock");
slider.setValue(0.0);
setValue("0.0");
slider.valueProperty().addListener(ev -> {
setValue(String.format("%.5f", slider.getValue()));
this.initiateConnectionChanges();
});
}
}
|
package nl.utwente.viskell.ui.components;
import javafx.fxml.FXML;
import javafx.scene.control.Slider;
import nl.utwente.viskell.ui.CustomUIPane;
/**
* An extension of ValueBlock.
* The value of this Block can be changed by dragging a slider.
* Ranges from 0 to 1 (both inclusive).
*/
public class SliderBlock extends ValueBlock {
@FXML protected Slider slider;
/**
* Constructs a new SliderBlock
* @param pane The parent pane this Block resides on.
*/
public SliderBlock(CustomUIPane pane) {
super(pane, pane.getEnvInstance().buildType("Fractional a => a"), "0.0", "SliderBlock");
slider.setValue(0.0);
setValue("0.0");
slider.valueProperty().addListener(ev -> {
setValue(String.format(Locale.US, "%.5f", slider.getValue()));
this.initiateConnectionChanges();
});
}
}
|
Fix format of slider block floats to be always valid.
|
Fix format of slider block floats to be always valid.
|
Java
|
mit
|
wandernauta/viskell,viskell/viskell,andrewdavidmackenzie/viskell
|
java
|
## Code Before:
package nl.utwente.viskell.ui.components;
import javafx.fxml.FXML;
import javafx.scene.control.Slider;
import nl.utwente.viskell.ui.CustomUIPane;
/**
* An extension of ValueBlock.
* The value of this Block can be changed by dragging a slider.
* Ranges from 0 to 1 (both inclusive).
*/
public class SliderBlock extends ValueBlock {
@FXML protected Slider slider;
/**
* Constructs a new SliderBlock
* @param pane The parent pane this Block resides on.
*/
public SliderBlock(CustomUIPane pane) {
super(pane, pane.getEnvInstance().buildType("Fractional a => a"), "0.0", "SliderBlock");
slider.setValue(0.0);
setValue("0.0");
slider.valueProperty().addListener(ev -> {
setValue(String.format("%.5f", slider.getValue()));
this.initiateConnectionChanges();
});
}
}
## Instruction:
Fix format of slider block floats to be always valid.
## Code After:
package nl.utwente.viskell.ui.components;
import javafx.fxml.FXML;
import javafx.scene.control.Slider;
import nl.utwente.viskell.ui.CustomUIPane;
/**
* An extension of ValueBlock.
* The value of this Block can be changed by dragging a slider.
* Ranges from 0 to 1 (both inclusive).
*/
public class SliderBlock extends ValueBlock {
@FXML protected Slider slider;
/**
* Constructs a new SliderBlock
* @param pane The parent pane this Block resides on.
*/
public SliderBlock(CustomUIPane pane) {
super(pane, pane.getEnvInstance().buildType("Fractional a => a"), "0.0", "SliderBlock");
slider.setValue(0.0);
setValue("0.0");
slider.valueProperty().addListener(ev -> {
setValue(String.format(Locale.US, "%.5f", slider.getValue()));
this.initiateConnectionChanges();
});
}
}
|
// ... existing code ...
setValue("0.0");
slider.valueProperty().addListener(ev -> {
setValue(String.format(Locale.US, "%.5f", slider.getValue()));
this.initiateConnectionChanges();
});
}
// ... rest of the code ...
|
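The reason an explicit locale matters here is that Java's String.format uses the JVM default locale, so a locale with a decimal comma would render the slider value as, say, 0,50000, which is not a valid fractional literal downstream. Python's percent-formatting is locale-independent, so the analogous line always produces a dot:

print('%.5f' % 0.5)  # -> 0.50000 regardless of the system locale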
293b1d492cfd3c5542c78acffbbdebf4933b6d85
|
django_db_geventpool/backends/postgresql_psycopg2/creation.py
|
django_db_geventpool/backends/postgresql_psycopg2/creation.py
|
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreation(OriginalDatabaseCreation):
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
|
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreation(OriginalDatabaseCreation):
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber)
|
Handle open connections when creating the test database
|
Handle open connections when creating the test database
|
Python
|
apache-2.0
|
jneight/django-db-geventpool,PreppyLLC-opensource/django-db-geventpool
|
python
|
## Code Before:
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreation(OriginalDatabaseCreation):
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
## Instruction:
Handle open connections when creating the test database
## Code After:
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreation(OriginalDatabaseCreation):
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber)
|
# ... existing code ...
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreation, self)._destroy_test_db(test_database_name, verbosity)
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreation, self)._create_test_db(verbosity, autoclobber)
# ... rest of the code ...
|
275c51e0d9b70fbae84360d2dceb0c29abdc9524
|
src/main/java/org/sonar/plugins/xquery/api/XQueryConstants.java
|
src/main/java/org/sonar/plugins/xquery/api/XQueryConstants.java
|
/*
* © 2013 by Intellectual Reserve, Inc. All rights reserved.
*/
package org.sonar.plugins.xquery.api;
public interface XQueryConstants {
static String XQUERY_LANGUAGE_KEY = "xqy";
static String FILE_EXTENSIONS_KEY = "sonar.marklogic.fileExtensions";
static String SOURCE_DIRECTORY_KEY = "sonar.marklogic.sourceDirectory";
static String XQTEST_REPORTS_DIRECTORY_KEY = "sonar.xqtest.reportsPath";
static String XQUERY_LANGUAGE_NAME = "XQuery";
static String[] DEFAULT_FILE_EXTENSIONS = { "xqy", "xquery", "xq" };
static String DEFAULT_FILE_EXTENSIONS_STRING = "xqy, xquery, xq";
static String DEFAULT_SOURCE_DIRECTORY = "src/main/xquery";
static String DEFAULT_XQTEST_DIRECTORY = "target/xqtest-reports";
}
|
/*
* © 2013 by Intellectual Reserve, Inc. All rights reserved.
*/
package org.sonar.plugins.xquery.api;
public interface XQueryConstants {
static String XQUERY_LANGUAGE_KEY = "xquery";
static String FILE_EXTENSIONS_KEY = "sonar.marklogic.fileExtensions";
static String SOURCE_DIRECTORY_KEY = "sonar.marklogic.sourceDirectory";
static String XQTEST_REPORTS_DIRECTORY_KEY = "sonar.xqtest.reportsPath";
static String XQUERY_LANGUAGE_NAME = "XQuery";
static String[] DEFAULT_FILE_EXTENSIONS = { "xqy", "xquery", "xq" , "xqi", "xql", "xqm", "xqws"};
static String DEFAULT_FILE_EXTENSIONS_STRING = "xqy, xquery, xq, xqi, xql, xqm, xqws";
static String DEFAULT_SOURCE_DIRECTORY = "src/main/xquery";
static String DEFAULT_XQTEST_DIRECTORY = "target/xqtest-reports";
}
|
Change language key to something more appropriate
|
Change language key to something more appropriate
|
Java
|
apache-2.0
|
malteseduck/sonar-xquery-plugin
|
java
|
## Code Before:
/*
* © 2013 by Intellectual Reserve, Inc. All rights reserved.
*/
package org.sonar.plugins.xquery.api;
public interface XQueryConstants {
static String XQUERY_LANGUAGE_KEY = "xqy";
static String FILE_EXTENSIONS_KEY = "sonar.marklogic.fileExtensions";
static String SOURCE_DIRECTORY_KEY = "sonar.marklogic.sourceDirectory";
static String XQTEST_REPORTS_DIRECTORY_KEY = "sonar.xqtest.reportsPath";
static String XQUERY_LANGUAGE_NAME = "XQuery";
static String[] DEFAULT_FILE_EXTENSIONS = { "xqy", "xquery", "xq" };
static String DEFAULT_FILE_EXTENSIONS_STRING = "xqy, xquery, xq";
static String DEFAULT_SOURCE_DIRECTORY = "src/main/xquery";
static String DEFAULT_XQTEST_DIRECTORY = "target/xqtest-reports";
}
## Instruction:
Change language key to something more appropriate
## Code After:
/*
* © 2013 by Intellectual Reserve, Inc. All rights reserved.
*/
package org.sonar.plugins.xquery.api;
public interface XQueryConstants {
static String XQUERY_LANGUAGE_KEY = "xquery";
static String FILE_EXTENSIONS_KEY = "sonar.marklogic.fileExtensions";
static String SOURCE_DIRECTORY_KEY = "sonar.marklogic.sourceDirectory";
static String XQTEST_REPORTS_DIRECTORY_KEY = "sonar.xqtest.reportsPath";
static String XQUERY_LANGUAGE_NAME = "XQuery";
static String[] DEFAULT_FILE_EXTENSIONS = { "xqy", "xquery", "xq" , "xqi", "xql", "xqm", "xqws"};
static String DEFAULT_FILE_EXTENSIONS_STRING = "xqy, xquery, xq, xqi, xql, xqm, xqws";
static String DEFAULT_SOURCE_DIRECTORY = "src/main/xquery";
static String DEFAULT_XQTEST_DIRECTORY = "target/xqtest-reports";
}
|
// ... existing code ...
public interface XQueryConstants {
static String XQUERY_LANGUAGE_KEY = "xquery";
static String FILE_EXTENSIONS_KEY = "sonar.marklogic.fileExtensions";
static String SOURCE_DIRECTORY_KEY = "sonar.marklogic.sourceDirectory";
static String XQTEST_REPORTS_DIRECTORY_KEY = "sonar.xqtest.reportsPath";
static String XQUERY_LANGUAGE_NAME = "XQuery";
static String[] DEFAULT_FILE_EXTENSIONS = { "xqy", "xquery", "xq" , "xqi", "xql", "xqm", "xqws"};
static String DEFAULT_FILE_EXTENSIONS_STRING = "xqy, xquery, xq, xqi, xql, xqm, xqws";
static String DEFAULT_SOURCE_DIRECTORY = "src/main/xquery";
static String DEFAULT_XQTEST_DIRECTORY = "target/xqtest-reports";
}
// ... rest of the code ...
|
df790275ba9f06296f800ecd913eca8393c300c6
|
psyparse/handler/base_handler.py
|
psyparse/handler/base_handler.py
|
class BaseHandler(object):
"""
    An abstract handler class to help define how a handler should behave. No
methods are actually implemented and will raise a not-implemented error
if an instance of a handler subclass does not implement any of the
following methods.
"""
def new(self, entry):
"""Create a new entry"""
raise ("""'new' method not defined in handler subclass""")
def update(self, entry, attribute, new_value):
"""Update a given entry. This is useful when properties of a given
entry are only discovered sometime later in parsing."""
raise("""'update' method not defined in handler subclass""")
|
class BaseHandler(object):
"""
    An abstract handler class to help define how a handler should behave. No
methods are actually implemented and will raise a not-implemented error
if an instance of a handler subclass does not implement any of the
following methods.
"""
def new(self, entry):
"""Create a new entry"""
raise Exception("""'new' method not defined in handler subclass""")
def update(self, entry, attribute, new_value):
"""Update a given entry. This is useful when properties of a given
entry are only discovered sometime later in parsing."""
raise Exception("""'update' method not defined in handler subclass""")
|
Fix bug in exception throwing (it caused an exception!).
|
Fix bug in exception throwing (it caused an exception!).
|
Python
|
mit
|
tnez/PsyParse
|
python
|
## Code Before:
class BaseHandler(object):
"""
    An abstract handler class to help define how a handler should behave. No
methods are actually implemented and will raise a not-implemented error
if an instance of a handler subclass does not implement any of the
following methods.
"""
def new(self, entry):
"""Create a new entry"""
raise ("""'new' method not defined in handler subclass""")
def update(self, entry, attribute, new_value):
"""Update a given entry. This is useful when properties of a given
entry are only discovered sometime later in parsing."""
raise("""'update' method not defined in handler subclass""")
## Instruction:
Fix bug in exception throwing (it caused an exception!).
## Code After:
class BaseHandler(object):
"""
    An abstract handler class to help define how a handler should behave. No
methods are actually implemented and will raise a not-implemented error
if an instance of a handler subclass does not implement any of the
following methods.
"""
def new(self, entry):
"""Create a new entry"""
raise Exception("""'new' method not defined in handler subclass""")
def update(self, entry, attribute, new_value):
"""Update a given entry. This is useful when properties of a given
entry are only discovered sometime later in parsing."""
raise Exception("""'update' method not defined in handler subclass""")
|
# ... existing code ...
"""
def new(self, entry):
"""Create a new entry"""
raise Exception("""'new' method not defined in handler subclass""")
def update(self, entry, attribute, new_value):
"""Update a given entry. This is useful when properties of a given
entry are only discovered sometime later in parsing."""
raise Exception("""'update' method not defined in handler subclass""")
# ... rest of the code ...
|
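A minimal sketch of a concrete handler satisfying the BaseHandler contract; the in-memory dictionary store is purely illustrative, and the import path is inferred from the file location above:

from psyparse.handler.base_handler import BaseHandler

class InMemoryHandler(BaseHandler):
    def __init__(self):
        self.entries = {}

    def new(self, entry):
        # Register a freshly parsed entry.
        self.entries[id(entry)] = entry

    def update(self, entry, attribute, new_value):
        # Patch a property discovered later in parsing.
        setattr(entry, attribute, new_value)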
1aa266b2b3406f046e00b65bb747a19c6d4445d7
|
src/cons.h
|
src/cons.h
|
namespace mclisp
{
class ConsCell
{
public:
ConsCell(): car(nullptr), cdr(nullptr) {}
ConsCell(ConsCell* car, ConsCell* cdr): car(car), cdr(cdr) {}
ConsCell* car;
ConsCell* cdr;
};
bool operator==(const ConsCell& lhs, const ConsCell& rhs);
bool operator!=(const ConsCell& lhs, const ConsCell& rhs);
bool operator< (const ConsCell& lhs, const ConsCell& rhs);
bool operator> (const ConsCell& lhs, const ConsCell& rhs);
bool operator<=(const ConsCell& lhs, const ConsCell& rhs);
bool operator>=(const ConsCell& lhs, const ConsCell& rhs);
std::ostream& operator<<(std::ostream& os, const ConsCell& cons);
extern const ConsCell* kNil;
extern const ConsCell* kT;
void HackToFixNil();
const ConsCell* MakeSymbol(const std::string& name);
const ConsCell* MakeCons(const ConsCell* car, const ConsCell* cdr);
inline bool Symbolp(const ConsCell* c);
inline bool Consp(const ConsCell* c);
const std::string SymbolName(const ConsCell* symbol);
} // namespace mclisp
#endif // MCLISP_CONS_H_
|
namespace mclisp
{
struct ConsCell
{
ConsCell* car;
ConsCell* cdr;
};
typedef struct ConsCell ConsCell;
bool operator==(const ConsCell& lhs, const ConsCell& rhs);
bool operator!=(const ConsCell& lhs, const ConsCell& rhs);
bool operator< (const ConsCell& lhs, const ConsCell& rhs);
bool operator> (const ConsCell& lhs, const ConsCell& rhs);
bool operator<=(const ConsCell& lhs, const ConsCell& rhs);
bool operator>=(const ConsCell& lhs, const ConsCell& rhs);
std::ostream& operator<<(std::ostream& os, const ConsCell& cons);
extern const ConsCell* kNil;
extern const ConsCell* kT;
void HackToFixNil();
const ConsCell* MakeSymbol(const std::string& name);
const ConsCell* MakeCons(const ConsCell* car, const ConsCell* cdr);
inline bool Symbolp(const ConsCell* c);
inline bool Consp(const ConsCell* c);
const std::string SymbolName(const ConsCell* symbol);
} // namespace mclisp
#endif // MCLISP_CONS_H_
|
Convert ConsCell from class to POD.
|
Convert ConsCell from class to POD.
|
C
|
mit
|
appleby/mccarthy-lisp,appleby/mccarthy-lisp,appleby/mccarthy-lisp,appleby/mccarthy-lisp,appleby/mccarthy-lisp
|
c
|
## Code Before:
namespace mclisp
{
class ConsCell
{
public:
ConsCell(): car(nullptr), cdr(nullptr) {}
ConsCell(ConsCell* car, ConsCell* cdr): car(car), cdr(cdr) {}
ConsCell* car;
ConsCell* cdr;
};
bool operator==(const ConsCell& lhs, const ConsCell& rhs);
bool operator!=(const ConsCell& lhs, const ConsCell& rhs);
bool operator< (const ConsCell& lhs, const ConsCell& rhs);
bool operator> (const ConsCell& lhs, const ConsCell& rhs);
bool operator<=(const ConsCell& lhs, const ConsCell& rhs);
bool operator>=(const ConsCell& lhs, const ConsCell& rhs);
std::ostream& operator<<(std::ostream& os, const ConsCell& cons);
extern const ConsCell* kNil;
extern const ConsCell* kT;
void HackToFixNil();
const ConsCell* MakeSymbol(const std::string& name);
const ConsCell* MakeCons(const ConsCell* car, const ConsCell* cdr);
inline bool Symbolp(const ConsCell* c);
inline bool Consp(const ConsCell* c);
const std::string SymbolName(const ConsCell* symbol);
} // namespace mclisp
#endif // MCLISP_CONS_H_
## Instruction:
Convert ConsCell from class to POD.
## Code After:
namespace mclisp
{
struct ConsCell
{
ConsCell* car;
ConsCell* cdr;
};
typedef struct ConsCell ConsCell;
bool operator==(const ConsCell& lhs, const ConsCell& rhs);
bool operator!=(const ConsCell& lhs, const ConsCell& rhs);
bool operator< (const ConsCell& lhs, const ConsCell& rhs);
bool operator> (const ConsCell& lhs, const ConsCell& rhs);
bool operator<=(const ConsCell& lhs, const ConsCell& rhs);
bool operator>=(const ConsCell& lhs, const ConsCell& rhs);
std::ostream& operator<<(std::ostream& os, const ConsCell& cons);
extern const ConsCell* kNil;
extern const ConsCell* kT;
void HackToFixNil();
const ConsCell* MakeSymbol(const std::string& name);
const ConsCell* MakeCons(const ConsCell* car, const ConsCell* cdr);
inline bool Symbolp(const ConsCell* c);
inline bool Consp(const ConsCell* c);
const std::string SymbolName(const ConsCell* symbol);
} // namespace mclisp
#endif // MCLISP_CONS_H_
|
...
namespace mclisp
{
struct ConsCell
{
ConsCell* car;
ConsCell* cdr;
};
typedef struct ConsCell ConsCell;
bool operator==(const ConsCell& lhs, const ConsCell& rhs);
bool operator!=(const ConsCell& lhs, const ConsCell& rhs);
...
|
c8a7b9acc6c66a44eeb9ceac91587bb8ad08ad89
|
pagedown/utils.py
|
pagedown/utils.py
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
Use `django.templatetags.static` to load the file
|
Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25
|
Python
|
bsd-3-clause
|
timmyomahony/django-pagedown,timmyomahony/django-pagedown,timmyomahony/django-pagedown
|
python
|
## Code Before:
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
## Instruction:
Use `django.templatetags.static` to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25
## Code After:
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
// ... existing code ...
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
// ... rest of the code ...
|
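Illustrative usage of the helper after the change; the asset path is only an example, and the resolved URL assumes a Django 1.4+ project with staticfiles configured and STATIC_URL set to '/static/':

from pagedown.utils import compatible_staticpath

url = compatible_staticpath('pagedown/demo.css')
# Resolved via django.templatetags.static.static, e.g. '/static/pagedown/demo.css'
print(url)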
cad23e7c73a8f33b7aa841d89d5311030d1c2262
|
databridge/helpers.py
|
databridge/helpers.py
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
Change adapter; fix filter func
|
Change adapter; fix filter func
|
Python
|
apache-2.0
|
yshalenyk/databridge
|
python
|
## Code Before:
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
## Instruction:
Change adapter; fix filter func
## Code After:
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
// ... existing code ...
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
// ... modified code ...
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
// ... rest of the code ...
|
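A sketch of how the shared adapter would typically be mounted on a requests session; only RetryAdapter itself comes from the module above, the session setup is assumed:

import requests
from databridge.helpers import RetryAdapter

session = requests.Session()
session.mount('http://', RetryAdapter)
session.mount('https://', RetryAdapter)
# Requests made through this session reuse pooled connections and retry failed
# connection attempts up to five times.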
5b6f92eb05e4ba75fa82ed63027c939c76def0cd
|
webapi-api/src/main/java/valandur/webapi/api/server/IServerService.java
|
webapi-api/src/main/java/valandur/webapi/api/server/IServerService.java
|
package valandur.webapi.api.server;
import java.util.List;
import java.util.Map;
/**
* This service provides information about the minecraft server.
*/
public interface IServerService {
/**
* Gets the properties that are in the server.properties file of the minecraft server.
* @return The properties of the minecraft server.
*/
Map<String, String> getProperties();
/**
* Sets a property for the minecraft server.
* @param key The key of the property.
* @param value The new value of the property.
*/
void setProperty(String key, String value);
/**
* Gets a history of the average TPS of the minecraft server.
* @return A list containing measurements of the TPS.
*/
List<IServerStat<Double>> getAverageTps();
/**
* Gets a history of the amount of players that were online on the minecraft server.
* @return A list containing measurements of the amount of players online.
*/
List<IServerStat<Integer>> getOnlinePlayers();
}
|
package valandur.webapi.api.server;
import java.util.List;
/**
* This service provides information about the minecraft server.
*/
public interface IServerService {
/**
* Gets a history of the average TPS of the minecraft server.
* @return A list containing measurements of the TPS.
*/
List<IServerStat<Double>> getAverageTps();
/**
* Gets a history of the amount of players that were online on the minecraft server.
* @return A list containing measurements of the amount of players online.
*/
List<IServerStat<Integer>> getOnlinePlayers();
}
|
Remove unused methods from server service interface
|
fix(server-properties): Remove unused methods from server service interface
|
Java
|
mit
|
Valandur/Web-API,Valandur/Web-API,Valandur/Web-API
|
java
|
## Code Before:
package valandur.webapi.api.server;
import java.util.List;
import java.util.Map;
/**
* This service provides information about the minecraft server.
*/
public interface IServerService {
/**
* Gets the properties that are in the server.properties file of the minecraft server.
* @return The properties of the minecraft server.
*/
Map<String, String> getProperties();
/**
* Sets a property for the minecraft server.
* @param key The key of the property.
* @param value The new value of the property.
*/
void setProperty(String key, String value);
/**
* Gets a history of the average TPS of the minecraft server.
* @return A list containing measurements of the TPS.
*/
List<IServerStat<Double>> getAverageTps();
/**
* Gets a history of the amount of players that were online on the minecraft server.
* @return A list containing measurements of the amount of players online.
*/
List<IServerStat<Integer>> getOnlinePlayers();
}
## Instruction:
fix(server-properties): Remove unused methods from server service interface
## Code After:
package valandur.webapi.api.server;
import java.util.List;
/**
* This service provides information about the minecraft server.
*/
public interface IServerService {
/**
* Gets a history of the average TPS of the minecraft server.
* @return A list containing measurements of the TPS.
*/
List<IServerStat<Double>> getAverageTps();
/**
* Gets a history of the amount of players that were online on the minecraft server.
* @return A list containing measurements of the amount of players online.
*/
List<IServerStat<Integer>> getOnlinePlayers();
}
|
// ... existing code ...
package valandur.webapi.api.server;
import java.util.List;
/**
* This service provides information about the minecraft server.
*/
public interface IServerService {
/**
* Gets a history of the average TPS of the minecraft server.
// ... rest of the code ...
|
25db9110d34760118b47b2bdf637cf6947154c2c
|
tests/unit/distributed/test_objectstore.py
|
tests/unit/distributed/test_objectstore.py
|
import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
|
import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
|
Test json file with api key is in API service class
|
Test json file with api key is in API service class
|
Python
|
mit
|
a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen
|
python
|
## Code Before:
import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
## Instruction:
Test json file with api key is in API service class
## Code After:
import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
|
# ... existing code ...
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
# ... rest of the code ...
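For orientation only, a hedged sketch of a GoogleDrive constructor that these mocks and assertions would exercise; the real bcbio class, key-file name, and Drive API version may differ:
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
from googleapiclient.discovery import build

class GoogleDrive(object):
    SCOPES = ['https://www.googleapis.com/auth/drive']
    GOOGLE_API_KEY_FILE = 'google_api_key.json'  # name assumed; the tests only require a .json suffix

    def __init__(self):
        # Build authorized credentials from the service-account key file, as the tests assert.
        credentials = ServiceAccountCredentials.from_json_keyfile_name(
            self.GOOGLE_API_KEY_FILE, scopes=self.SCOPES)
        self.service = build('drive', 'v3', http=credentials.authorize(Http()))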
|
8c8fbb8c3cf53ce0b193926fc89e426fb360eb81
|
database_import.py
|
database_import.py
|
import sys
import csv
from sqlalchemy.exc import IntegrityError
from openledger.models import db, Image
filename = sys.argv[1]
fields = ('ImageID', 'Subset', 'OriginalURL', 'OriginalLandingURL', 'License',
'AuthorProfileURL', 'Author', 'Title')
with open(filename) as csvfile:
db.create_all()
reader = csv.DictReader(csvfile)
for row in reader:
image = Image()
image.google_imageid = row['ImageID']
image.image_url = row['OriginalURL']
image.original_landing_url = row['OriginalLandingURL']
image.license_url = row['License']
image.author_url = row['AuthorProfileURL']
image.author = row['Author']
image.title = row['Title']
db.session.add(image)
try:
db.session.commit()
print("Adding image ", row['ImageID'])
except IntegrityError:
db.session.rollback()
|
import csv
import argparse
from sqlalchemy.exc import IntegrityError
from openledger.models import db, Image
def import_from_open_images(filename):
fields = ('ImageID', 'Subset', 'OriginalURL', 'OriginalLandingURL', 'License',
'AuthorProfileURL', 'Author', 'Title')
with open(filename) as csvfile:
db.create_all()
reader = csv.DictReader(csvfile)
for row in reader:
image = Image()
image.google_imageid = row['ImageID']
image.image_url = row['OriginalURL']
image.original_landing_url = row['OriginalLandingURL']
image.license_url = row['License']
image.author_url = row['AuthorProfileURL']
image.author = row['Author']
image.title = row['Title']
db.session.add(image)
try:
db.session.commit()
print("Adding image ", row['ImageID'])
except IntegrityError:
db.session.rollback()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--open-images-path",
dest="openimages_path",
help="The location of the Google Open Images csv file")
parser.add_argument("--flickr-100m-path",
dest="flickr100m_path",
help="The location of the Flickr 100M tsv directory")
args = parser.parse_args()
if args.openimages_path:
import_from_open_images(args.openimages_path)
|
Tidy up database import to take arguments for multiple sources
|
Tidy up database import to take arguments for multiple sources
|
Python
|
mit
|
creativecommons/open-ledger,creativecommons/open-ledger,creativecommons/open-ledger
|
python
|
## Code Before:
import sys
import csv
from sqlalchemy.exc import IntegrityError
from openledger.models import db, Image
filename = sys.argv[1]
fields = ('ImageID', 'Subset', 'OriginalURL', 'OriginalLandingURL', 'License',
'AuthorProfileURL', 'Author', 'Title')
with open(filename) as csvfile:
db.create_all()
reader = csv.DictReader(csvfile)
for row in reader:
image = Image()
image.google_imageid = row['ImageID']
image.image_url = row['OriginalURL']
image.original_landing_url = row['OriginalLandingURL']
image.license_url = row['License']
image.author_url = row['AuthorProfileURL']
image.author = row['Author']
image.title = row['Title']
db.session.add(image)
try:
db.session.commit()
print("Adding image ", row['ImageID'])
except IntegrityError:
db.session.rollback()
## Instruction:
Tidy up database import to take arguments for multiple sources
## Code After:
import csv
import argparse
from sqlalchemy.exc import IntegrityError
from openledger.models import db, Image
def import_from_open_images(filename):
fields = ('ImageID', 'Subset', 'OriginalURL', 'OriginalLandingURL', 'License',
'AuthorProfileURL', 'Author', 'Title')
with open(filename) as csvfile:
db.create_all()
reader = csv.DictReader(csvfile)
for row in reader:
image = Image()
image.google_imageid = row['ImageID']
image.image_url = row['OriginalURL']
image.original_landing_url = row['OriginalLandingURL']
image.license_url = row['License']
image.author_url = row['AuthorProfileURL']
image.author = row['Author']
image.title = row['Title']
db.session.add(image)
try:
db.session.commit()
print("Adding image ", row['ImageID'])
except IntegrityError:
db.session.rollback()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--open-images-path",
dest="openimages_path",
help="The location of the Google Open Images csv file")
parser.add_argument("--flickr-100m-path",
dest="flickr100m_path",
help="The location of the Flickr 100M tsv directory")
args = parser.parse_args()
if args.openimages_path:
import_from_open_images(args.openimages_path)
|
// ... existing code ...
import csv
import argparse
from sqlalchemy.exc import IntegrityError
from openledger.models import db, Image
def import_from_open_images(filename):
fields = ('ImageID', 'Subset', 'OriginalURL', 'OriginalLandingURL', 'License',
'AuthorProfileURL', 'Author', 'Title')
with open(filename) as csvfile:
db.create_all()
reader = csv.DictReader(csvfile)
for row in reader:
image = Image()
image.google_imageid = row['ImageID']
image.image_url = row['OriginalURL']
image.original_landing_url = row['OriginalLandingURL']
image.license_url = row['License']
image.author_url = row['AuthorProfileURL']
image.author = row['Author']
image.title = row['Title']
db.session.add(image)
try:
db.session.commit()
print("Adding image ", row['ImageID'])
except IntegrityError:
db.session.rollback()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--open-images-path",
dest="openimages_path",
help="The location of the Google Open Images csv file")
parser.add_argument("--flickr-100m-path",
dest="flickr100m_path",
help="The location of the Flickr 100M tsv directory")
args = parser.parse_args()
if args.openimages_path:
import_from_open_images(args.openimages_path)
// ... rest of the code ...
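A hedged usage sketch for the new argparse entry point (paths are illustrative and the openledger app must be importable in the environment):
# From the command line:
#   python database_import.py --open-images-path /data/open-images/images.csv
# Or call the importer directly from a shell session:
from database_import import import_from_open_images
import_from_open_images('/data/open-images/images.csv')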
|
13a0c8b822582f84ff393298b13cf1e43642f825
|
tests/test_stacks_file.py
|
tests/test_stacks_file.py
|
import json
from dmaws.stacks import Stack
from dmaws.context import Context
def is_true(x):
assert x
def is_in(a, b):
assert a in b
def valid_stack_json(stack):
text = stack.build('stage', 'env', {}).template_body
template = json.loads(text)
assert 'Parameters' in template
assert set(template['Parameters']) == set(stack.parameters)
assert 'Resources' in template
def test_stack_definitions():
ctx = Context()
ctx.load_stacks('stacks.yml')
yield('Found stacks in the stacks.yml',
is_true, any(isinstance(s, Stack) for s in ctx.stacks.values()))
yield('Found groups in stacks.yml',
is_true, any(isinstance(s, list) for s in ctx.stacks.values()))
for name, stack in ctx.stacks.items():
if isinstance(stack, list):
for s in stack:
yield('Stack "%s" in group %s is defined' % (s, name),
is_in, s, ctx.stacks)
else:
for s in stack.dependencies:
yield('%s dependency "%s" is defined' % (name, s),
is_in, s, ctx.stacks)
yield('Stack "%s" template_body is valid JSON' % name,
valid_stack_json, stack)
|
import os
import json
from dmaws.stacks import Stack
from dmaws.context import Context
def is_true(x):
assert x
def is_in(a, b):
assert a in b
def valid_stack_json(ctx, stack):
text = stack.build('stage', 'env', ctx.variables).template_body
template = json.loads(text)
assert 'Parameters' in template
assert set(template['Parameters']) == set(stack.parameters)
assert 'Resources' in template
def _load_default_vars(ctx):
default_vars_files = [
'vars/common.yml',
]
if os.path.exists('vars/user.yml'):
default_vars_files.append('vars/user.yml')
ctx.load_variables(files=default_vars_files)
def test_stack_definitions():
ctx = Context()
_load_default_vars(ctx)
ctx.load_stacks('stacks.yml')
yield('Found stacks in the stacks.yml',
is_true, any(isinstance(s, Stack) for s in ctx.stacks.values()))
yield('Found groups in stacks.yml',
is_true, any(isinstance(s, list) for s in ctx.stacks.values()))
for name, stack in ctx.stacks.items():
if isinstance(stack, list):
for s in stack:
yield('Stack "%s" in group %s is defined' % (s, name),
is_in, s, ctx.stacks)
else:
for s in stack.dependencies:
yield('%s dependency "%s" is defined' % (name, s),
is_in, s, ctx.stacks)
yield('Stack "%s" template_body is valid JSON' % name,
valid_stack_json, ctx, stack)
|
Load default var files when testing template JSON
|
Load default var files when testing template JSON
Since template Jinja processing has access to the template variables
we need to load the default files when testing the JSON output.
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
python
|
## Code Before:
import json
from dmaws.stacks import Stack
from dmaws.context import Context
def is_true(x):
assert x
def is_in(a, b):
assert a in b
def valid_stack_json(stack):
text = stack.build('stage', 'env', {}).template_body
template = json.loads(text)
assert 'Parameters' in template
assert set(template['Parameters']) == set(stack.parameters)
assert 'Resources' in template
def test_stack_definitions():
ctx = Context()
ctx.load_stacks('stacks.yml')
yield('Found stacks in the stacks.yml',
is_true, any(isinstance(s, Stack) for s in ctx.stacks.values()))
yield('Found groups in stacks.yml',
is_true, any(isinstance(s, list) for s in ctx.stacks.values()))
for name, stack in ctx.stacks.items():
if isinstance(stack, list):
for s in stack:
yield('Stack "%s" in group %s is defined' % (s, name),
is_in, s, ctx.stacks)
else:
for s in stack.dependencies:
yield('%s dependency "%s" is defined' % (name, s),
is_in, s, ctx.stacks)
yield('Stack "%s" template_body is valid JSON' % name,
valid_stack_json, stack)
## Instruction:
Load default var files when testing template JSON
Since template Jinja processing has access to the template variables
we need to load the default files when testing the JSON output.
## Code After:
import os
import json
from dmaws.stacks import Stack
from dmaws.context import Context
def is_true(x):
assert x
def is_in(a, b):
assert a in b
def valid_stack_json(ctx, stack):
text = stack.build('stage', 'env', ctx.variables).template_body
template = json.loads(text)
assert 'Parameters' in template
assert set(template['Parameters']) == set(stack.parameters)
assert 'Resources' in template
def _load_default_vars(ctx):
default_vars_files = [
'vars/common.yml',
]
if os.path.exists('vars/user.yml'):
default_vars_files.append('vars/user.yml')
ctx.load_variables(files=default_vars_files)
def test_stack_definitions():
ctx = Context()
_load_default_vars(ctx)
ctx.load_stacks('stacks.yml')
yield('Found stacks in the stacks.yml',
is_true, any(isinstance(s, Stack) for s in ctx.stacks.values()))
yield('Found groups in stacks.yml',
is_true, any(isinstance(s, list) for s in ctx.stacks.values()))
for name, stack in ctx.stacks.items():
if isinstance(stack, list):
for s in stack:
yield('Stack "%s" in group %s is defined' % (s, name),
is_in, s, ctx.stacks)
else:
for s in stack.dependencies:
yield('%s dependency "%s" is defined' % (name, s),
is_in, s, ctx.stacks)
yield('Stack "%s" template_body is valid JSON' % name,
valid_stack_json, ctx, stack)
|
# ... existing code ...
import os
import json
from dmaws.stacks import Stack
# ... modified code ...
assert a in b
def valid_stack_json(ctx, stack):
text = stack.build('stage', 'env', ctx.variables).template_body
template = json.loads(text)
assert 'Parameters' in template
...
assert 'Resources' in template
def _load_default_vars(ctx):
default_vars_files = [
'vars/common.yml',
]
if os.path.exists('vars/user.yml'):
default_vars_files.append('vars/user.yml')
ctx.load_variables(files=default_vars_files)
def test_stack_definitions():
ctx = Context()
_load_default_vars(ctx)
ctx.load_stacks('stacks.yml')
yield('Found stacks in the stacks.yml',
...
yield('%s dependency "%s" is defined' % (name, s),
is_in, s, ctx.stacks)
yield('Stack "%s" template_body is valid JSON' % name,
valid_stack_json, ctx, stack)
# ... rest of the code ...
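A hedged, framework-free sketch of the default-variable merging idea behind _load_default_vars (file names as in the test; the real Context.load_variables API may merge differently):
import os
import yaml

def load_default_vars(paths=('vars/common.yml', 'vars/user.yml')):
    # Later files override earlier ones; missing optional files are skipped.
    merged = {}
    for path in paths:
        if os.path.exists(path):
            with open(path) as f:
                merged.update(yaml.safe_load(f) or {})
    return merged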
|
01ef95fcf2ce75ce934d8537b8e93b87fe11ef44
|
microservice-commons/src/main/java/mib/microservice/commons/kafka/KafkaProducerDefaultProperties.java
|
microservice-commons/src/main/java/mib/microservice/commons/kafka/KafkaProducerDefaultProperties.java
|
package mib.microservice.commons.kafka;
import java.util.Map;
import java.util.Properties;
/**
* Default/fallback config properties for a kafka producer
*
* @author knittl
*
*/
public class KafkaProducerDefaultProperties extends Properties {
private static final long serialVersionUID = 1L;
public KafkaProducerDefaultProperties() {
this.put("serializer.class", "mib.microservices.util.JsonEncoder");
this.put("key.serializer.class", "kafka.serializer.StringEncoder");
this.put("partitioner.class", "kafka.producer.DefaultPartitioner");
this.put("request.required.acks", "1");
}
public KafkaProducerDefaultProperties(Map<String, String> props) {
this();
this.putAll(props);
}
}
|
package mib.microservice.commons.kafka;
import java.util.Map;
import java.util.Properties;
/**
* Default/fallback config properties for a kafka producer
*
* @author knittl
*
*/
public class KafkaProducerDefaultProperties extends Properties {
private static final long serialVersionUID = 1L;
public KafkaProducerDefaultProperties() {
this.put("serializer.class", JsonEncoder.class.getName());
this.put("key.serializer.class", kafka.serializer.StringEncoder.class.getName());
this.put("partitioner.class", kafka.producer.DefaultPartitioner.class.getName());
this.put("request.required.acks", "1");
}
public KafkaProducerDefaultProperties(Map<String, String> props) {
this();
this.putAll(props);
}
}
|
Use .class.getName() instead of hardcoded strings in default properties
|
Use .class.getName() instead of hardcoded strings in default properties
|
Java
|
apache-2.0
|
mpopp/MIB
|
java
|
## Code Before:
package mib.microservice.commons.kafka;
import java.util.Map;
import java.util.Properties;
/**
* Default/fallback config properties for a kafka producer
*
* @author knittl
*
*/
public class KafkaProducerDefaultProperties extends Properties {
private static final long serialVersionUID = 1L;
public KafkaProducerDefaultProperties() {
this.put("serializer.class", "mib.microservices.util.JsonEncoder");
this.put("key.serializer.class", "kafka.serializer.StringEncoder");
this.put("partitioner.class", "kafka.producer.DefaultPartitioner");
this.put("request.required.acks", "1");
}
public KafkaProducerDefaultProperties(Map<String, String> props) {
this();
this.putAll(props);
}
}
## Instruction:
Use .class.getName() instead of hardcoded strings in default properties
## Code After:
package mib.microservice.commons.kafka;
import java.util.Map;
import java.util.Properties;
/**
* Default/fallback config properties for a kafka producer
*
* @author knittl
*
*/
public class KafkaProducerDefaultProperties extends Properties {
private static final long serialVersionUID = 1L;
public KafkaProducerDefaultProperties() {
this.put("serializer.class", JsonEncoder.class.getName());
this.put("key.serializer.class", kafka.serializer.StringEncoder.class.getName());
this.put("partitioner.class", kafka.producer.DefaultPartitioner.class.getName());
this.put("request.required.acks", "1");
}
public KafkaProducerDefaultProperties(Map<String, String> props) {
this();
this.putAll(props);
}
}
|
# ... existing code ...
private static final long serialVersionUID = 1L;
public KafkaProducerDefaultProperties() {
this.put("serializer.class", JsonEncoder.class.getName());
this.put("key.serializer.class", kafka.serializer.StringEncoder.class.getName());
this.put("partitioner.class", kafka.producer.DefaultPartitioner.class.getName());
this.put("request.required.acks", "1");
}
# ... rest of the code ...
|
f89f8e139999bb15003fa866ab0bae2fc21f7136
|
hamcrest-library/src/main/java/org/hamcrest/text/MatchesPattern.java
|
hamcrest-library/src/main/java/org/hamcrest/text/MatchesPattern.java
|
package org.hamcrest.text;
import java.util.regex.Pattern;
import org.hamcrest.Description;
import org.hamcrest.Factory;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
public class MatchesPattern extends TypeSafeMatcher<String> {
private final Pattern pattern;
public MatchesPattern(Pattern pattern) {
this.pattern = pattern;
}
@Override
protected boolean matchesSafely(String item) {
return pattern.matcher(item).matches();
}
@Override
public void describeTo(Description description) {
description.appendText("a string matching the pattern '" + pattern + "'");
}
@Factory
public static Matcher<String> matchesPattern(Pattern pattern) {
return new MatchesPattern(pattern);
}
@Factory
public static Matcher<String> matchesPattern(String regex) {
return new MatchesPattern(Pattern.compile(regex));
}
}
|
package org.hamcrest.text;
import java.util.regex.Pattern;
import org.hamcrest.Description;
import org.hamcrest.Factory;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
public class MatchesPattern extends TypeSafeMatcher<String> {
private final Pattern pattern;
public MatchesPattern(Pattern pattern) {
this.pattern = pattern;
}
@Override
protected boolean matchesSafely(String item) {
return pattern.matcher(item).matches();
}
@Override
public void describeTo(Description description) {
description.appendText("a string matching the pattern '" + pattern + "'");
}
/**
* Creates a matcher of {@link String} that matches when the examined string
* exactly matches the given {@link Pattern}.
*/
@Factory
public static Matcher<String> matchesPattern(Pattern pattern) {
return new MatchesPattern(pattern);
}
/**
* Creates a matcher of {@link String} that matches when the examined string
* exactly matches the given regular expression, treated as a {@link Pattern}.
*/
@Factory
public static Matcher<String> matchesPattern(String regex) {
return new MatchesPattern(Pattern.compile(regex));
}
}
|
Add Javadoc for factory methods.
|
Add Javadoc for factory methods.
|
Java
|
bsd-3-clause
|
rysh/JavaHamcrest,dkomanov/JavaHamcrest,vizewang/JavaHamcrest,alb-i986/JavaHamcrest,akruijff/JavaHamcrest,josephw/JavaHamcrest,plow/JavaHamcrest,mmichaelis/JavaHamcrest,vizewang/JavaHamcrest,alb-i986/JavaHamcrest,mmichaelis/JavaHamcrest,jforge/JavaHamcrest,josephw/JavaHamcrest,wgpshashank/JavaHamcrest,rysh/JavaHamcrest,tomwhoiscontrary/JavaHamcrest,akruijff/JavaHamcrest,wgpshashank/JavaHamcrest,mnk/hamcrest,jforge/JavaHamcrest,plow/JavaHamcrest,dkomanov/JavaHamcrest,mnk/hamcrest,tomwhoiscontrary/JavaHamcrest
|
java
|
## Code Before:
package org.hamcrest.text;
import java.util.regex.Pattern;
import org.hamcrest.Description;
import org.hamcrest.Factory;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
public class MatchesPattern extends TypeSafeMatcher<String> {
private final Pattern pattern;
public MatchesPattern(Pattern pattern) {
this.pattern = pattern;
}
@Override
protected boolean matchesSafely(String item) {
return pattern.matcher(item).matches();
}
@Override
public void describeTo(Description description) {
description.appendText("a string matching the pattern '" + pattern + "'");
}
@Factory
public static Matcher<String> matchesPattern(Pattern pattern) {
return new MatchesPattern(pattern);
}
@Factory
public static Matcher<String> matchesPattern(String regex) {
return new MatchesPattern(Pattern.compile(regex));
}
}
## Instruction:
Add Javadoc for factory methods.
## Code After:
package org.hamcrest.text;
import java.util.regex.Pattern;
import org.hamcrest.Description;
import org.hamcrest.Factory;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
public class MatchesPattern extends TypeSafeMatcher<String> {
private final Pattern pattern;
public MatchesPattern(Pattern pattern) {
this.pattern = pattern;
}
@Override
protected boolean matchesSafely(String item) {
return pattern.matcher(item).matches();
}
@Override
public void describeTo(Description description) {
description.appendText("a string matching the pattern '" + pattern + "'");
}
/**
* Creates a matcher of {@link String} that matches when the examined string
* exactly matches the given {@link Pattern}.
*/
@Factory
public static Matcher<String> matchesPattern(Pattern pattern) {
return new MatchesPattern(pattern);
}
/**
* Creates a matcher of {@link String} that matches when the examined string
* exactly matches the given regular expression, treated as a {@link Pattern}.
*/
@Factory
public static Matcher<String> matchesPattern(String regex) {
return new MatchesPattern(Pattern.compile(regex));
}
}
|
// ... existing code ...
description.appendText("a string matching the pattern '" + pattern + "'");
}
/**
* Creates a matcher of {@link String} that matches when the examined string
* exactly matches the given {@link Pattern}.
*/
@Factory
public static Matcher<String> matchesPattern(Pattern pattern) {
return new MatchesPattern(pattern);
}
/**
* Creates a matcher of {@link String} that matches when the examined string
* exactly matches the given regular expression, treated as a {@link Pattern}.
*/
@Factory
public static Matcher<String> matchesPattern(String regex) {
return new MatchesPattern(Pattern.compile(regex));
// ... rest of the code ...
|
d8b4acd0617dc93646e177ac56b0205be1b7ff88
|
seaweb_project/seaweb_project/urls.py
|
seaweb_project/seaweb_project/urls.py
|
from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
|
from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
|
Disable indexing via robots.txt url.
|
Disable indexing via robots.txt url.
|
Python
|
mit
|
grollins/sea-web-django
|
python
|
## Code Before:
from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
## Instruction:
Disable indexing via robots.txt url.
## Code After:
from django.conf.urls import patterns, url, include
from django.views.generic.base import TemplateView
from django.contrib.auth.decorators import login_required
from django.contrib.auth import views as auth_views
from django.contrib import admin
admin.autodiscover()
from rest_framework.routers import DefaultRouter
from jobs.views import JobViewSet, UserViewSet, ResultViewSet
router = DefaultRouter()
router.register(r'jobs', JobViewSet, base_name='job')
router.register(r'results', ResultViewSet)
router.register(r'users', UserViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^login/$', 'jobs.views.login', name='login')
)
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
|
# ... existing code ...
urlpatterns += patterns('backend.views',
url(r'^admin/', include(admin.site.urls))
)
urlpatterns = patterns('',
(r'^robots\.txt$',
lambda r: HttpResponse("User-agent: *\nDisallow: /", mimetype="text/plain"))
)
# ... rest of the code ...
|
a4c8fc3e7f5624f8dcaffcf1ed6b8ba8f8bd9861
|
include/uwhd/sync/ModelSync.h
|
include/uwhd/sync/ModelSync.h
|
//===-- ModelSync.h - Wireless Syncing of GameModel ---------------- c++ --===//
//
// UWH Timer
//
// This file is distributed under the BSD 3-Clause License.
// See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef MODELSYNC_H
#define MODELSYNC_H
#include "uwhd/model/GameModel.h"
#include <string>
#include <vector>
struct ModelSync {
virtual void Init() = 0;
virtual void setMgr(GameModelManager *M) = 0;
virtual GameModelManager &getMgr() = 0;
};
//struct ModelSyncPeer {
// virtual std::string name() const = 0;
// virtual std::string addr() const = 0;
//};
struct ModelSyncServer : public ModelSync {
//virtual std::vector<ModelSyncPeer*> peers() = 0;
//virtual void claimPeer(const ModelSyncPeer *P) = 0;
//virtual void getPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void setPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void adjustPeerWallClock(const ModelSyncPeer *P, int Delta) = 0;
};
ModelSync *CreateSocketServer(const std::string &Port);
ModelSync *CreateSocketClient(const std::string &Host, const std::string &Port);
ModelSyncServer *CreateXBeeSyncServer();
ModelSync *CreateXBeeSyncClient();
#endif
|
//===-- ModelSync.h - Wireless Syncing of GameModel ---------------- c++ --===//
//
// UWH Timer
//
// This file is distributed under the BSD 3-Clause License.
// See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef MODELSYNC_H
#define MODELSYNC_H
#include "uwhd/model/GameModel.h"
#include <string>
#include <vector>
struct ModelSync {
virtual ~ModelSync() = 0;
virtual void Init() = 0;
virtual void setMgr(GameModelManager *M) = 0;
virtual GameModelManager &getMgr() = 0;
};
//struct ModelSyncPeer {
// virtual std::string name() const = 0;
// virtual std::string addr() const = 0;
//};
struct ModelSyncServer : public ModelSync {
virtual ~ModelSyncServer() = 0;
//virtual std::vector<ModelSyncPeer*> peers() = 0;
//virtual void claimPeer(const ModelSyncPeer *P) = 0;
//virtual void getPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void setPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void adjustPeerWallClock(const ModelSyncPeer *P, int Delta) = 0;
};
ModelSync *CreateSocketServer(const std::string &Port);
ModelSync *CreateSocketClient(const std::string &Host, const std::string &Port);
ModelSyncServer *CreateXBeeSyncServer();
ModelSync *CreateXBeeSyncClient();
#endif
|
Address a warning from the generated swig code
|
[sync] Address a warning from the generated swig code
|
C
|
bsd-3-clause
|
Navisjon/uwh-display,jroelofs/uwh-display,jroelofs/uwh-display,jroelofs/uwh-display,Navisjon/uwh-display,Navisjon/uwh-display,Navisjon/uwh-display
|
c
|
## Code Before:
//===-- ModelSync.h - Wireless Syncing of GameModel ---------------- c++ --===//
//
// UWH Timer
//
// This file is distributed under the BSD 3-Clause License.
// See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef MODELSYNC_H
#define MODELSYNC_H
#include "uwhd/model/GameModel.h"
#include <string>
#include <vector>
struct ModelSync {
virtual void Init() = 0;
virtual void setMgr(GameModelManager *M) = 0;
virtual GameModelManager &getMgr() = 0;
};
//struct ModelSyncPeer {
// virtual std::string name() const = 0;
// virtual std::string addr() const = 0;
//};
struct ModelSyncServer : public ModelSync {
//virtual std::vector<ModelSyncPeer*> peers() = 0;
//virtual void claimPeer(const ModelSyncPeer *P) = 0;
//virtual void getPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void setPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void adjustPeerWallClock(const ModelSyncPeer *P, int Delta) = 0;
};
ModelSync *CreateSocketServer(const std::string &Port);
ModelSync *CreateSocketClient(const std::string &Host, const std::string &Port);
ModelSyncServer *CreateXBeeSyncServer();
ModelSync *CreateXBeeSyncClient();
#endif
## Instruction:
[sync] Address a warning from the generated swig code
## Code After:
//===-- ModelSync.h - Wireless Syncing of GameModel ---------------- c++ --===//
//
// UWH Timer
//
// This file is distributed under the BSD 3-Clause License.
// See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef MODELSYNC_H
#define MODELSYNC_H
#include "uwhd/model/GameModel.h"
#include <string>
#include <vector>
struct ModelSync {
virtual ~ModelSync() = 0;
virtual void Init() = 0;
virtual void setMgr(GameModelManager *M) = 0;
virtual GameModelManager &getMgr() = 0;
};
//struct ModelSyncPeer {
// virtual std::string name() const = 0;
// virtual std::string addr() const = 0;
//};
struct ModelSyncServer : public ModelSync {
virtual ~ModelSyncServer() = 0;
//virtual std::vector<ModelSyncPeer*> peers() = 0;
//virtual void claimPeer(const ModelSyncPeer *P) = 0;
//virtual void getPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void setPeerWallClock(const ModelSyncPeer *P) = 0;
//virtual void adjustPeerWallClock(const ModelSyncPeer *P, int Delta) = 0;
};
ModelSync *CreateSocketServer(const std::string &Port);
ModelSync *CreateSocketClient(const std::string &Host, const std::string &Port);
ModelSyncServer *CreateXBeeSyncServer();
ModelSync *CreateXBeeSyncClient();
#endif
|
# ... existing code ...
#include <vector>
struct ModelSync {
virtual ~ModelSync() = 0;
virtual void Init() = 0;
virtual void setMgr(GameModelManager *M) = 0;
virtual GameModelManager &getMgr() = 0;
# ... modified code ...
//};
struct ModelSyncServer : public ModelSync {
virtual ~ModelSyncServer() = 0;
//virtual std::vector<ModelSyncPeer*> peers() = 0;
//virtual void claimPeer(const ModelSyncPeer *P) = 0;
//virtual void getPeerWallClock(const ModelSyncPeer *P) = 0;
# ... rest of the code ...
|
bb97137951699f51641fd504a2aaa56d269ae6bd
|
test/idempotency/C005_ArrayInitialization.java
|
test/idempotency/C005_ArrayInitialization.java
|
import java.util.List;
class C005_ArrayInitialization {
{
String[] names = {"Reinier", "Roel"};
String[] names2 = new String[]{"Reinier", "Roel"};
String[] names3 = new java.lang.String[]{"Reinier", "Roel"};
List[] list1 = new List[0];
List<String>[] list2 = new List<String>[0];
List<String>[] list3 = new java.util.List<String>[0];
int[] sized = new int[0];
int[][] sizedTwoDimensions = new int[0][0];
int[][] sizedTwoDimensions2 = new int[0][];
int[][][] sizedThreeDimensions = new int[0][][];
int[][] empty = {{}};
int[][] ints = new int[][] {{}};
int[] singleInts = new int[] {};
int more[] = {};
int[] more2[] = {{}};
}
}
|
class C005_ArrayInitialization {
{
String[] names = {"Reinier", "Roel"};
String[] names2 = new String[] {"Reinier", "Roel"};
String[] names3 = new java.lang.String[] {"Reinier", "Roel"};
int[] sized = new int[0];
int[][] sizedTwoDimensions = new int[0][0];
int[][] sizedTwoDimensions2 = new int[0][];
int[][][] sizedThreeDimensions = new int[0][][];
int[][] empty = {{}};
int[][] ints = new int[][] {{}};
int[] singleInts = new int[] {};
int more[] = {};
int[] more2[] = {{}};
}
}
|
Make the ArrayInitialization test also compile...
|
Make the ArrayInitialization test also compile...
|
Java
|
mit
|
evant/android-retrolambda-lombok,rzwitserloot/lombok.ast,rzwitserloot/lombok.ast,evant/android-retrolambda-lombok,evant/android-retrolambda-lombok,komalsukhani/deb-lombok.ast,rzwitserloot/lombok.ast,komalsukhani/deb-lombok.ast,komalsukhani/deb-lombok.ast
|
java
|
## Code Before:
import java.util.List;
class C005_ArrayInitialization {
{
String[] names = {"Reinier", "Roel"};
String[] names2 = new String[]{"Reinier", "Roel"};
String[] names3 = new java.lang.String[]{"Reinier", "Roel"};
List[] list1 = new List[0];
List<String>[] list2 = new List<String>[0];
List<String>[] list3 = new java.util.List<String>[0];
int[] sized = new int[0];
int[][] sizedTwoDimensions = new int[0][0];
int[][] sizedTwoDimensions2 = new int[0][];
int[][][] sizedThreeDimensions = new int[0][][];
int[][] empty = {{}};
int[][] ints = new int[][] {{}};
int[] singleInts = new int[] {};
int more[] = {};
int[] more2[] = {{}};
}
}
## Instruction:
Make the ArrayInitialization test also compile...
## Code After:
class C005_ArrayInitialization {
{
String[] names = {"Reinier", "Roel"};
String[] names2 = new String[] {"Reinier", "Roel"};
String[] names3 = new java.lang.String[] {"Reinier", "Roel"};
int[] sized = new int[0];
int[][] sizedTwoDimensions = new int[0][0];
int[][] sizedTwoDimensions2 = new int[0][];
int[][][] sizedThreeDimensions = new int[0][][];
int[][] empty = {{}};
int[][] ints = new int[][] {{}};
int[] singleInts = new int[] {};
int more[] = {};
int[] more2[] = {{}};
}
}
|
...
class C005_ArrayInitialization {
{
String[] names = {"Reinier", "Roel"};
String[] names2 = new String[] {"Reinier", "Roel"};
String[] names3 = new java.lang.String[] {"Reinier", "Roel"};
int[] sized = new int[0];
int[][] sizedTwoDimensions = new int[0][0];
int[][] sizedTwoDimensions2 = new int[0][];
...
|
63431113fbb1d1d6793761e3cc30a8492df2f580
|
setup.py
|
setup.py
|
"""How to release a new version: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi"""
from businesstime import __version__
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='businesstime',
version=__version__,
author='SeatGeek',
author_email='[email protected]',
packages=['businesstime'],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
'Programming Language :: Python :: 2.7',
],
description='A simple utility for calculating business time aware timedeltas between two datetimes',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
tests_require=['nose'],
test_suite='nose.collector'
)
|
"""How to release a new version: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi"""
from businesstime import __version__
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='businesstime',
version=__version__,
author='SeatGeek',
author_email='[email protected]',
packages=[
'businesstime',
'businesstime.holidays',
],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
'Programming Language :: Python :: 2.7',
],
description='A simple utility for calculating business time aware timedeltas between two datetimes',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
tests_require=['nose'],
test_suite='nose.collector'
)
|
Add holidays module to packages list
|
Add holidays module to packages list
Closes #14
|
Python
|
bsd-2-clause
|
seatgeek/businesstime
|
python
|
## Code Before:
"""How to release a new version: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi"""
from businesstime import __version__
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='businesstime',
version=__version__,
author='SeatGeek',
author_email='[email protected]',
packages=['businesstime'],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
'Programming Language :: Python :: 2.7',
],
description='A simple utility for calculating business time aware timedeltas between two datetimes',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
tests_require=['nose'],
test_suite='nose.collector'
)
## Instruction:
Add holidays module to packages list
Closes #14
## Code After:
"""How to release a new version: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi"""
from businesstime import __version__
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='businesstime',
version=__version__,
author='SeatGeek',
author_email='[email protected]',
packages=[
'businesstime',
'businesstime.holidays',
],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
'Programming Language :: Python :: 2.7',
],
description='A simple utility for calculating business time aware timedeltas between two datetimes',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
tests_require=['nose'],
test_suite='nose.collector'
)
|
...
version=__version__,
author='SeatGeek',
author_email='[email protected]',
packages=[
'businesstime',
'businesstime.holidays',
],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
...
|
d01217875a1c720b3c6fabe05fd3b0c2b0d3b287
|
qtpy/QtWebEngineQuick.py
|
qtpy/QtWebEngineQuick.py
|
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
Replace generic PythonQtError with QtModuleNotInstalledError
|
Replace generic PythonQtError with QtModuleNotInstalledError
|
Python
|
mit
|
spyder-ide/qtpy
|
python
|
## Code Before:
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ImportError as error:
raise PythonQtError(
'The QtWebEngineQuick module was not found. '
'It needs to be installed separately for PyQt6.'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
## Instruction:
Replace generic PythonQtError with QtModuleNotInstalledError
## Code After:
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, PythonQtError
if PYQT5:
raise PythonQtError('QtWebEngineQuick not implemented in PyQt5')
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
from PySide6.QtWebEngineQuick import *
else:
raise PythonQtError('No Qt bindings could be found')
|
...
elif PYQT6:
try:
from PyQt6.QtWebEngineQuick import *
except ModuleNotFoundError as error:
raise QtModuleNotInstalledError(
name='QtWebEngineQuick', binding=API_NAME, missing_package='PyQt6-WebEngine'
) from error
elif PYSIDE2:
raise PythonQtError('QtWebEngineQuick not implemented in PySide2')
elif PYSIDE6:
...
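A hedged caller-side sketch for treating the WebEngine Quick binding as optional (flag name is assumed, not part of qtpy):
try:
    from qtpy import QtWebEngineQuick  # noqa: F401
    HAS_WEBENGINE_QUICK = True
except Exception:  # covers PythonQtError / QtModuleNotInstalledError across qtpy versions
    HAS_WEBENGINE_QUICK = False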
|
b7559972bc28532108027784a05e8ffc43cb398a
|
tests/test_models.py
|
tests/test_models.py
|
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
|
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
|
Replace unittest.TestCase with Django’s own TestCase.
|
Replace unittest.TestCase with Django’s own TestCase.
|
Python
|
bsd-3-clause
|
mishbahr/djangocms-responsive-wrapper,mishbahr/djangocms-responsive-wrapper,mishbahr/djangocms-responsive-wrapper
|
python
|
## Code Before:
import os
import shutil
import unittest
from responsive_wrapper import models
class TestResponsive_wrapper(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
## Instruction:
Replace unittest.TestCase with Django’s own TestCase.
## Code After:
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
|
...
from django.conf import settings
from django.test import TestCase
from responsive_wrapper import models
class TestResponsive_wrapper(TestCase):
def setUp(self):
pass
...
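A hedged sketch of a filled-in Django test for this module (the app label is assumed, not taken from responsive_wrapper):
from django.conf import settings
from django.test import TestCase

class TestResponsiveWrapperConfig(TestCase):
    def test_app_is_installed(self):
        # Adjust the label to match the app config used in the test settings.
        self.assertIn('responsive_wrapper', settings.INSTALLED_APPS)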
|
7359740eaddf6e4c01ccd91fe1044932d019d8e3
|
Settings/Controls/Label.h
|
Settings/Controls/Label.h
|
class Label : public Control {
public:
Label() {
}
Label(int id, HWND parent) :
Control(id, parent) {
}
};
|
class Label : public Control {
public:
Label() {
}
Label(int id, HWND parent) :
Control(id, parent) {
}
Label(int id, DialogBase &parent, bool translate = true) :
Control(id, parent, false) {
}
};
|
Add a new-style constructor for labels
|
Add a new-style constructor for labels
|
C
|
bsd-2-clause
|
malensek/3RVX,malensek/3RVX,malensek/3RVX
|
c
|
## Code Before:
class Label : public Control {
public:
Label() {
}
Label(int id, HWND parent) :
Control(id, parent) {
}
};
## Instruction:
Add a new-style constructor for labels
## Code After:
class Label : public Control {
public:
Label() {
}
Label(int id, HWND parent) :
Control(id, parent) {
}
Label(int id, DialogBase &parent, bool translate = true) :
Control(id, parent, false) {
}
};
|
...
Control(id, parent) {
}
Label(int id, DialogBase &parent, bool translate = true) :
Control(id, parent, false) {
}
};
...
|
7527ce1b48f769d33eb5ede3d54413e51eb2ac12
|
senkumba/models.py
|
senkumba/models.py
|
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
|
from django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
Change titles for the site
|
Change titles for the site
|
Python
|
mit
|
lubegamark/senkumba
|
python
|
## Code Before:
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
## Instruction:
Change titles for the site
## Code After:
from django.contrib import admin
from django.contrib.auth.models import User
def user_new_str(self):
return self.username if self.get_full_name() == "" else self.get_full_name()
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
|
...
from django.contrib import admin
from django.contrib.auth.models import User
...
# Replace the __str__ method in the User class with our new implementation
User.__str__ = user_new_str
admin.site.site_header = 'SENKUMBA'
admin.site.site_title = 'SENKUMBA'
admin.site.index_title = 'SENKUMBA'
...
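A quick illustration of the patched __str__ on unsaved instances (run inside a configured Django project, no database needed):
from django.contrib.auth.models import User
print(str(User(username='jdoe')))                                      # -> 'jdoe'
print(str(User(username='jdoe', first_name='Jane', last_name='Doe')))  # -> 'Jane Doe'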
|
4f67141cfabe99af99434364e13fec91bef291a7
|
grip/constants.py
|
grip/constants.py
|
supported_extensions = ['.md', '.markdown']
# The default filenames when no file is provided
default_filenames = map(lambda ext: 'README' + ext, supported_extensions)
|
supported_extensions = [
'.markdown', '.mdown', '.mkdn', '.md',
'.textile',
'.rdoc',
'.org',
'.creole',
'.mediawiki', '.wiki',
'.rst',
'.asciidoc', '.adoc', '.asc',
'.pod',
]
# The default filenames when no file is provided
default_filenames = map(lambda ext: 'README' + ext, supported_extensions)
|
Add the GitHub-supported format extensions.
|
Add the GitHub-supported format extensions.
|
Python
|
mit
|
ssundarraj/grip,joeyespo/grip,ssundarraj/grip,jbarreras/grip,jbarreras/grip,joeyespo/grip,mgoddard-pivotal/grip,mgoddard-pivotal/grip
|
python
|
## Code Before:
supported_extensions = ['.md', '.markdown']
# The default filenames when no file is provided
default_filenames = map(lambda ext: 'README' + ext, supported_extensions)
## Instruction:
Add the GitHub-supported format extensions.
## Code After:
supported_extensions = [
'.markdown', '.mdown', '.mkdn', '.md',
'.textile',
'.rdoc',
'.org',
'.creole',
'.mediawiki', '.wiki',
'.rst',
'.asciidoc', '.adoc', '.asc',
'.pod',
]
# The default filenames when no file is provided
default_filenames = map(lambda ext: 'README' + ext, supported_extensions)
|
// ... existing code ...
supported_extensions = [
'.markdown', '.mdown', '.mkdn', '.md',
'.textile',
'.rdoc',
'.org',
'.creole',
'.mediawiki', '.wiki',
'.rst',
'.asciidoc', '.adoc', '.asc',
'.pod',
]
# The default filenames when no file is provided
default_filenames = map(lambda ext: 'README' + ext, supported_extensions)
// ... rest of the code ...
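One hedged portability note: on Python 3, map() returns a one-shot iterator, so callers that iterate default_filenames more than once may prefer a list comprehension:
supported_extensions = ['.markdown', '.mdown', '.mkdn', '.md']  # trimmed for illustration
default_filenames = ['README' + ext for ext in supported_extensions]
print(default_filenames)  # ['README.markdown', 'README.mdown', 'README.mkdn', 'README.md']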
|
8541737b5a3a50188162349727a0d0230613e630
|
test/features/test_create_pages.py
|
test/features/test_create_pages.py
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
|
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
Change services number in test
|
Change services number in test
|
Python
|
mit
|
alphagov/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer
|
python
|
## Code Before:
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text, is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('654'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text, is_("All services"))
## Instruction:
Change services number in test
## Code After:
from hamcrest import *
from nose.tools import nottest
from test.features import BrowserTest
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
headlines = self.browser.find_by_css('.headline')
departments = headlines[0].text
services = headlines[1].text
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
|
# ... existing code ...
class test_create_pages(BrowserTest):
def test_about_page(self):
self.browser.visit("http://0.0.0.0:8000/high-volume-services/"
"by-transactions-per-year/descending")
assert_that(self.browser.find_by_css('h1').text,
is_('High-volume services'))
def test_home_page(self):
self.browser.visit("http://0.0.0.0:8000/home")
# ... modified code ...
transactions = headlines[2].text
assert_that(departments, contains_string('16'))
assert_that(services, contains_string('658'))
assert_that(transactions, contains_string('1.31bn'))
@nottest
...
def test_all_services(self):
self.browser.visit("http://0.0.0.0:8000/all-services")
assert_that(self.browser.find_by_css('h1').text, is_("All Services"))
assert_that(self.browser.find_by_css('#navigation .current').text,
is_("All services"))
# ... rest of the code ...
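A standalone PyHamcrest sketch of the assertion style used above (no browser or running site needed):
from hamcrest import assert_that, contains_string, is_
assert_that('658 services this year', contains_string('658'))
assert_that('All Services', is_('All Services'))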
|
1ee1496439f4dcd654df0a1f119b05a8288e3dd2
|
query.py
|
query.py
|
from numpy import uint16
from numpy import bool_
from query_result_list import QueryResultList
from data_record import DataRecord
from has_actions import HasActions
class Query(DataRecord, HasActions):
def __init__(self, query_id, topic = None, result_list = None, user = None, condition = None, autocomplete = None, query_text = None):
DataRecord.__init__( self, uint16(query_id) )
self.topic = topic
self.result_list = result_list
self.user = user
self.condition = condition
self.autocomplete = bool_(autocomplete)
self.query_text = query_text
if result_list is None: self.result_list = QueryResultList(self)
def add_to_result_list( self, rank, document ):
self.result_list.add( rank, document )
def results_up_to_rank( self, rank ):
return self.result_list.results_up_to_rank( rank )
|
from numpy import uint16
from numpy import bool_
from query_result_list import QueryResultList
from data_record import DataRecord
from has_actions import HasActions
class Query(DataRecord, HasActions):
def __init__(self, query_id, topic = None, user = None, condition = None, autocomplete = None, query_text = None):
DataRecord.__init__( self, uint16(query_id) )
self.topic = topic
self.user = user
self.condition = condition
self.autocomplete = bool_(autocomplete)
self.query_text = query_text
self.result_list = QueryResultList(self)
def add_to_result_list( self, rank, document ):
self.result_list.add( rank, document )
def results_up_to_rank( self, rank ):
if int(rank) < 1 or int(rank) > self.result_list.length():
raise RuntimeError("Attempted to fetch results up to rank %s for query %s, which is impossible." % (rank, self.record_id))
return self.result_list.results_up_to_rank( rank )
|
Remove result list handling from Query constructor
|
Remove result list handling from Query constructor
|
Python
|
mit
|
fire-uta/iiix-data-parser
|
python
|
## Code Before:
from numpy import uint16
from numpy import bool_
from query_result_list import QueryResultList
from data_record import DataRecord
from has_actions import HasActions
class Query(DataRecord, HasActions):
def __init__(self, query_id, topic = None, result_list = None, user = None, condition = None, autocomplete = None, query_text = None):
DataRecord.__init__( self, uint16(query_id) )
self.topic = topic
self.result_list = result_list
self.user = user
self.condition = condition
self.autocomplete = bool_(autocomplete)
self.query_text = query_text
if result_list is None: self.result_list = QueryResultList(self)
def add_to_result_list( self, rank, document ):
self.result_list.add( rank, document )
def results_up_to_rank( self, rank ):
return self.result_list.results_up_to_rank( rank )
## Instruction:
Remove result list handling from Query constructor
## Code After:
from numpy import uint16
from numpy import bool_
from query_result_list import QueryResultList
from data_record import DataRecord
from has_actions import HasActions
class Query(DataRecord, HasActions):
def __init__(self, query_id, topic = None, user = None, condition = None, autocomplete = None, query_text = None):
DataRecord.__init__( self, uint16(query_id) )
self.topic = topic
self.user = user
self.condition = condition
self.autocomplete = bool_(autocomplete)
self.query_text = query_text
self.result_list = QueryResultList(self)
def add_to_result_list( self, rank, document ):
self.result_list.add( rank, document )
def results_up_to_rank( self, rank ):
if int(rank) < 1 or int(rank) > self.result_list.length():
raise RuntimeError("Attempted to fetch results up to rank %s for query %s, which is impossible." % (rank, self.record_id))
return self.result_list.results_up_to_rank( rank )
|
// ... existing code ...
class Query(DataRecord, HasActions):
def __init__(self, query_id, topic = None, user = None, condition = None, autocomplete = None, query_text = None):
DataRecord.__init__( self, uint16(query_id) )
self.topic = topic
self.user = user
self.condition = condition
self.autocomplete = bool_(autocomplete)
self.query_text = query_text
self.result_list = QueryResultList(self)
def add_to_result_list( self, rank, document ):
self.result_list.add( rank, document )
def results_up_to_rank( self, rank ):
if int(rank) < 1 or int(rank) > self.result_list.length():
raise RuntimeError("Attempted to fetch results up to rank %s for query %s, which is impossible." % (rank, self.record_id))
return self.result_list.results_up_to_rank( rank )
// ... rest of the code ...
|
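The record above moves result-list creation into the constructor and adds a bounds check before slicing. A minimal, self-contained sketch of that guard-clause pattern follows, using a plain list in place of the project's QueryResultList; the function and variable names are illustrative, not the project's API.

```python
def results_up_to_rank(results, rank):
    # Reject ranks outside 1..len(results) instead of silently returning a short slice.
    rank = int(rank)
    if rank < 1 or rank > len(results):
        raise RuntimeError(
            "Attempted to fetch results up to rank %s, which is impossible." % rank)
    return results[:rank]

docs = ["doc-a", "doc-b", "doc-c"]
print(results_up_to_rank(docs, 2))   # ['doc-a', 'doc-b']
try:
    results_up_to_rank(docs, 5)
except RuntimeError as err:
    print(err)                        # explains why rank 5 is impossible
```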
dfc7c7ae72b91f3bc7724da6b0d8071b3e9253b7
|
altair/vegalite/v2/examples/us_state_capitals.py
|
altair/vegalite/v2/examples/us_state_capitals.py
|
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text().encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points
|
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points + points.mark_point(color='black')
|
Add points for capital locations>
|
Add points for capital locations>
|
Python
|
bsd-3-clause
|
ellisonbg/altair,jakevdp/altair,altair-viz/altair
|
python
|
## Code Before:
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text().encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points
## Instruction:
Add points for capital locations>
## Code After:
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points + points.mark_point(color='black')
|
// ... existing code ...
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points + points.mark_point(color='black')
// ... rest of the code ...
|
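The change in the record above layers a point mark over the text mark by reusing the same encoded chart with `mark_point`. Below is a small, self-contained sketch of that reuse pattern with invented data; it assumes a reasonably recent Altair and pandas install, and the coordinates are treated as plain quantitative fields rather than a geographic projection.

```python
import altair as alt
import pandas as pd

df = pd.DataFrame({
    "lon": [-121.49, -112.07],
    "lat": [38.58, 33.45],
    "city": ["Sacramento", "Phoenix"],
})

# One encoded chart, two marks: labels offset above the points they annotate.
labels = alt.Chart(df).mark_text(dy=-5, align="right").encode(
    x="lon:Q", y="lat:Q", text="city:N")
dots = labels.mark_point(color="black")

layered = labels + dots
layered.save("capitals_sketch.html")  # writes a standalone HTML file
```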
1c8f29d78d6409ba58df36d439f1ffd436c9dd10
|
gaphas/picklers.py
|
gaphas/picklers.py
|
from future import standard_library
standard_library.install_aliases()
import copyreg
# Allow instancemethod to be pickled:
import new
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return new.instancemethod(func, self, clazz)
def reduce_instancemethod(im):
return construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__)
copyreg.pickle(new.instancemethod, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
import cairo
def construct_cairo_matrix(*args):
return cairo.Matrix(*args)
def reduce_cairo_matrix(m):
return construct_cairo_matrix, tuple(m)
copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix)
# vim:sw=4:et:ai
|
from future import standard_library
standard_library.install_aliases()
import copyreg
# Allow instancemethod to be pickled:
import types
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return types.MethodType(func, self, clazz)
def reduce_instancemethod(im):
return construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__)
copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
import cairo
def construct_cairo_matrix(*args):
return cairo.Matrix(*args)
def reduce_cairo_matrix(m):
return construct_cairo_matrix, tuple(m)
copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix)
# vim:sw=4:et:ai
|
Fix no module 'new', replaced new.instancemethod with types.MethodType
|
Fix no module 'new', replaced new.instancemethod with types.MethodType
Signed-off-by: Dan Yeaw <[email protected]>
|
Python
|
lgpl-2.1
|
amolenaar/gaphas
|
python
|
## Code Before:
from future import standard_library
standard_library.install_aliases()
import copyreg
# Allow instancemethod to be pickled:
import new
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return new.instancemethod(func, self, clazz)
def reduce_instancemethod(im):
return construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__)
copyreg.pickle(new.instancemethod, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
import cairo
def construct_cairo_matrix(*args):
return cairo.Matrix(*args)
def reduce_cairo_matrix(m):
return construct_cairo_matrix, tuple(m)
copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix)
# vim:sw=4:et:ai
## Instruction:
Fix no module 'new', replaced new.instancemethod with types.MethodType
Signed-off-by: Dan Yeaw <[email protected]>
## Code After:
from future import standard_library
standard_library.install_aliases()
import copyreg
# Allow instancemethod to be pickled:
import types
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return types.MethodType(func, self, clazz)
def reduce_instancemethod(im):
return construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__)
copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
import cairo
def construct_cairo_matrix(*args):
return cairo.Matrix(*args)
def reduce_cairo_matrix(m):
return construct_cairo_matrix, tuple(m)
copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix)
# vim:sw=4:et:ai
|
# ... existing code ...
# Allow instancemethod to be pickled:
import types
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return types.MethodType(func, self, clazz)
def reduce_instancemethod(im):
return construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__)
copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
# ... rest of the code ...
|
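For context, a minimal, runnable sketch of the copyreg registration idea on Python 3, where `types.MethodType` takes just a function and an instance (the two-argument form). The class and reconstruction helper below are invented for illustration and are not the gaphas code.

```python
import copyreg
import pickle
import types

class Greeter:
    def hello(self):
        return "hello"

def construct_instancemethod(funcname, obj):
    # Rebuild the bound method by attribute lookup on the unpickled instance.
    return getattr(obj, funcname)

def reduce_instancemethod(im):
    return construct_instancemethod, (im.__func__.__name__, im.__self__)

copyreg.pickle(types.MethodType, reduce_instancemethod)

bound = Greeter().hello
restored = pickle.loads(pickle.dumps(bound))
print(restored())   # -> "hello"
```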
6054ffdeccfc50bfe49f83c60b8a2a7fb35ee10d
|
control.c
|
control.c
|
void btoa(int num, char *buf, int digits) {
int shift = digits - 1;
int current = pow(2, shift);
char digit[2];
while (current > 0) {
sprintf(digit, "%d", ((num & current) >> shift) & 1);
strncat(buf, digit, 1);
shift--;
current /= 2;
}
strcat(buf, "\0");
}
int main(int argc, char *argv[]) {
// 0: device
// 1: group
// 2: plug
// 3: status
char device[] = "\\\\.\\\\COM5";
if (serial_connect(device) == SERIAL_ERROR) {
printf("Failed to connect to serial device \"%s\"\n", device);
return 1;
}
struct Packet packet = { 1, 0, 3 };
if (serial_transmit(packet) == SERIAL_ERROR) {
printf("Failed to send data to serial device \"%s\"\n", device);
return 1;
}
serial_close();
return 0;
}
|
void btoa(int num, char *buf, int digits) {
int shift = digits - 1;
int current = pow(2, shift);
char digit[2];
while (current > 0) {
sprintf(digit, "%d", ((num & current) >> shift) & 1);
strncat(buf, digit, 1);
shift--;
current /= 2;
}
strcat(buf, "\0");
}
int getvalue(char *text, char *name, int min, int max) {
char *end;
long result = strtol(text, &end, 10);
if (*end != '\0' || result < min || result > max) {
fprintf(stderr, "Invalid value for %s.\n", name);
return -1;
}
return result;
}
int main(int argc, char *argv[]) {
// 0: device
// 1: group
// 2: plug
// 3: status
char device[] = "\\\\.\\\\COM5";
if (serial_connect(device) == SERIAL_ERROR) {
printf("Failed to connect to serial device \"%s\"\n", device);
return 1;
}
struct Packet packet = { 1, 0, 3 };
if (serial_transmit(packet) == SERIAL_ERROR) {
printf("Failed to send data to serial device \"%s\"\n", device);
return 1;
}
serial_close();
return 0;
}
|
Add function to convert a string to an integer
|
Add function to convert a string to an integer
|
C
|
agpl-3.0
|
jackwilsdon/lightcontrol,jackwilsdon/lightcontrol
|
c
|
## Code Before:
void btoa(int num, char *buf, int digits) {
int shift = digits - 1;
int current = pow(2, shift);
char digit[2];
while (current > 0) {
sprintf(digit, "%d", ((num & current) >> shift) & 1);
strncat(buf, digit, 1);
shift--;
current /= 2;
}
strcat(buf, "\0");
}
int main(int argc, char *argv[]) {
// 0: device
// 1: group
// 2: plug
// 3: status
char device[] = "\\\\.\\\\COM5";
if (serial_connect(device) == SERIAL_ERROR) {
printf("Failed to connect to serial device \"%s\"\n", device);
return 1;
}
struct Packet packet = { 1, 0, 3 };
if (serial_transmit(packet) == SERIAL_ERROR) {
printf("Failed to send data to serial device \"%s\"\n", device);
return 1;
}
serial_close();
return 0;
}
## Instruction:
Add function to convert a string to an integer
## Code After:
void btoa(int num, char *buf, int digits) {
int shift = digits - 1;
int current = pow(2, shift);
char digit[2];
while (current > 0) {
sprintf(digit, "%d", ((num & current) >> shift) & 1);
strncat(buf, digit, 1);
shift--;
current /= 2;
}
strcat(buf, "\0");
}
int getvalue(char *text, char *name, int min, int max) {
char *end;
long result = strtol(text, &end, 10);
if (*end != '\0' || result < min || result > max) {
fprintf(stderr, "Invalid value for %s.\n", name);
return -1;
}
return result;
}
int main(int argc, char *argv[]) {
// 0: device
// 1: group
// 2: plug
// 3: status
char device[] = "\\\\.\\\\COM5";
if (serial_connect(device) == SERIAL_ERROR) {
printf("Failed to connect to serial device \"%s\"\n", device);
return 1;
}
struct Packet packet = { 1, 0, 3 };
if (serial_transmit(packet) == SERIAL_ERROR) {
printf("Failed to send data to serial device \"%s\"\n", device);
return 1;
}
serial_close();
return 0;
}
|
# ... existing code ...
}
strcat(buf, "\0");
}
int getvalue(char *text, char *name, int min, int max) {
char *end;
long result = strtol(text, &end, 10);
if (*end != '\0' || result < min || result > max) {
fprintf(stderr, "Invalid value for %s.\n", name);
return -1;
}
return result;
}
int main(int argc, char *argv[]) {
# ... rest of the code ...
|
a5f5231e8e55b7052e2525876b60f939598edc91
|
setup.py
|
setup.py
|
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="[email protected]",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
|
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="[email protected]",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
Mark as requiring at least Python 3.6
|
Mark as requiring at least Python 3.6
|
Python
|
mit
|
jingw/pyhdfs,jingw/pyhdfs
|
python
|
## Code Before:
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="[email protected]",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
)
## Instruction:
Mark as requiring at least Python 3.6
## Code After:
import os
import re
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
long_description = readme.read()
with open(os.path.join(os.path.dirname(__file__), 'pyhdfs', '__init__.py')) as py:
version_match = re.search(r"__version__ = '(.+?)'", py.read())
assert version_match
version = version_match.group(1)
with open(os.path.join(os.path.dirname(__file__), 'dev_requirements.txt')) as dev_requirements:
tests_require = dev_requirements.read().splitlines()
setup(
name="PyHDFS",
version=version,
description="Pure Python HDFS client",
long_description=long_description,
url='https://github.com/jingw/pyhdfs',
author="Jing Wang",
author_email="[email protected]",
license="MIT License",
packages=['pyhdfs'],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Filesystems",
],
install_requires=[
'requests',
'simplejson',
],
tests_require=tests_require,
package_data={
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
|
...
'': ['*.rst'],
'pyhdfs': ['py.typed']
},
python_requires='>=3.6',
)
...
|
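As a hedged aside, `python_requires` works by having pip compare the declared specifier against the running interpreter before installing. A self-contained sketch of that comparison using the `packaging` library (which ships alongside pip); the specifier string is the one added in the record above.

```python
import platform
from packaging.specifiers import SpecifierSet

requires_python = SpecifierSet(">=3.6")
current = platform.python_version()   # e.g. "3.11.4"

if current in requires_python:
    print("%s satisfies %s" % (current, requires_python))
else:
    print("pip would refuse to install this package on %s" % current)
```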
94a128b7202c5629b77208a401222f3dcba92196
|
anonymoustre/main.py
|
anonymoustre/main.py
|
from functools import reduce
import pprint
import time
import shodan
import requests
import api_key
from google_api import query_google_api
from shodan_api import query_shodan_api
from mxtoolbox_api import query_mxtoolbox_api
from utils import assoc_default_score, combine_scores
pp = pprint.PrettyPrinter(indent=2)
def main():
start_time = time.time()
# No more than 10 requests
ips = ['103.245.153.70']
scored_ips = assoc_default_score(ips)
shodan_scores = query_shodan_api(ips)
google_scores = query_google_api(ips)
# Limited number of requests... Be careful
mx_toolbox_scores = query_mxtoolbox_api(ips)
results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores, mx_toolbox_scores])
pp.pprint(results)
print("--------- %s seconds -------" % (time.time() - start_time))
return results
def get_some_ips():
req = requests.get("https://zeustracker.abuse.ch/blocklist.php?download=badips")
return [line for line in req.text.split('\n') if line and line[0].isdigit()]
def get_bad_ips():
with open("bad_ips.txt", "r") as file:
ips = list(filter(lambda line: line != '', file.read().split("\n")))
return ips
if __name__ == "__main__":
main()
|
from functools import reduce
import pprint
import time
import shodan
import requests
import api_key
from google_api import query_google_api
from shodan_api import query_shodan_api
from mxtoolbox_api import query_mxtoolbox_api
from utils import assoc_default_score, combine_scores
pp = pprint.PrettyPrinter(indent=2)
def main():
start_time = time.time()
# No more than 10 requests
ips = ['103.245.153.70']
scored_ips = assoc_default_score(ips)
shodan_scores = query_shodan_api(ips)
google_scores = query_google_api(ips)
# Limited number of requests... Be careful
# mx_toolbox_scores = query_mxtoolbox_api(ips)
results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores])
pp.pprint(results)
print("--------- %s seconds -------" % (time.time() - start_time))
return results
def get_some_ips():
req = requests.get("https://zeustracker.abuse.ch/blocklist.php?download=badips")
return [line for line in req.text.split('\n') if line and line[0].isdigit()]
def get_bad_ips():
with open("bad_ips.txt", "r") as file:
ips = list(filter(lambda line: line != '', file.read().split("\n")))
return ips
if __name__ == "__main__":
main()
|
Comment mxtoolbox because of limited requests
|
Comment mxtoolbox because of limited requests
|
Python
|
mit
|
Dominionized/anonymoustre,Dominionized/anonymoustre
|
python
|
## Code Before:
from functools import reduce
import pprint
import time
import shodan
import requests
import api_key
from google_api import query_google_api
from shodan_api import query_shodan_api
from mxtoolbox_api import query_mxtoolbox_api
from utils import assoc_default_score, combine_scores
pp = pprint.PrettyPrinter(indent=2)
def main():
start_time = time.time()
# No more than 10 requests
ips = ['103.245.153.70']
scored_ips = assoc_default_score(ips)
shodan_scores = query_shodan_api(ips)
google_scores = query_google_api(ips)
# Limited number of requests... Be careful
mx_toolbox_scores = query_mxtoolbox_api(ips)
results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores, mx_toolbox_scores])
pp.pprint(results)
print("--------- %s seconds -------" % (time.time() - start_time))
return results
def get_some_ips():
req = requests.get("https://zeustracker.abuse.ch/blocklist.php?download=badips")
return [line for line in req.text.split('\n') if line and line[0].isdigit()]
def get_bad_ips():
with open("bad_ips.txt", "r") as file:
ips = list(filter(lambda line: line != '', file.read().split("\n")))
return ips
if __name__ == "__main__":
main()
## Instruction:
Comment mxtoolbox because of limited requests
## Code After:
from functools import reduce
import pprint
import time
import shodan
import requests
import api_key
from google_api import query_google_api
from shodan_api import query_shodan_api
from mxtoolbox_api import query_mxtoolbox_api
from utils import assoc_default_score, combine_scores
pp = pprint.PrettyPrinter(indent=2)
def main():
start_time = time.time()
# No more than 10 requests
ips = ['103.245.153.70']
scored_ips = assoc_default_score(ips)
shodan_scores = query_shodan_api(ips)
google_scores = query_google_api(ips)
# Limited number of requests... Be careful
# mx_toolbox_scores = query_mxtoolbox_api(ips)
results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores])
pp.pprint(results)
print("--------- %s seconds -------" % (time.time() - start_time))
return results
def get_some_ips():
req = requests.get("https://zeustracker.abuse.ch/blocklist.php?download=badips")
return [line for line in req.text.split('\n') if line and line[0].isdigit()]
def get_bad_ips():
with open("bad_ips.txt", "r") as file:
ips = list(filter(lambda line: line != '', file.read().split("\n")))
return ips
if __name__ == "__main__":
main()
|
# ... existing code ...
google_scores = query_google_api(ips)
# Limited number of requests... Be careful
# mx_toolbox_scores = query_mxtoolbox_api(ips)
results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores])
pp.pprint(results)
print("--------- %s seconds -------" % (time.time() - start_time))
# ... rest of the code ...
|
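A small, self-contained sketch of the `reduce`-based merging pattern the script above relies on; `combine_scores` here is an invented stand-in that sums per-IP scores, not the project's real implementation.

```python
from functools import reduce

def combine_scores(acc, scores):
    merged = dict(acc)
    for ip, score in scores.items():
        merged[ip] = merged.get(ip, 0) + score
    return merged

parts = [
    {"103.245.153.70": 1},   # default score
    {"103.245.153.70": 3},   # e.g. a Shodan-derived score
    {"10.0.0.1": 2},
]
print(reduce(combine_scores, parts))   # {'103.245.153.70': 4, '10.0.0.1': 2}
```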
3db4d306c779ef3a84133dbbfc5614d514d72411
|
pi_gpio/handlers.py
|
pi_gpio/handlers.py
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinHttpManager
from pi_gpio import app
HTTP_MANAGER = PinHttpManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer
}
def pin_not_found(self):
return {'message': 'Pin not found'}, 404
class PinList(Pin):
def get(self):
result = HTTP_MANAGER.read_all()
return self.response(result, 200)
class PinDetail(Pin):
def get(self, pin_num):
result = HTTP_MANAGER.read_one(pin_num)
if not result:
return self.pin_not_found()
return self.response(result, 200)
def patch(self, pin_num):
self.parser.add_argument('value', type=int)
args = self.parser.parse_args()
result = HTTP_MANAGER.update_value(pin_num, args['value'])
if not result:
return self.pin_not_found()
return self.response(HTTP_MANAGER.read_one(pin_num), 200)
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinHttpManager
from pi_gpio import app
HTTP_MANAGER = PinHttpManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer,
"resistor": fields.String,
"initial": fields.String,
"event": fields.String,
"bounce": fields.Integer
}
def pin_not_found(self):
return {'message': 'Pin not found'}, 404
class PinList(Pin):
def get(self):
result = HTTP_MANAGER.read_all()
return self.response(result, 200)
class PinDetail(Pin):
def get(self, pin_num):
result = HTTP_MANAGER.read_one(pin_num)
if not result:
return self.pin_not_found()
return self.response(result, 200)
def patch(self, pin_num):
self.parser.add_argument('value', type=int)
args = self.parser.parse_args()
result = HTTP_MANAGER.update_value(pin_num, args['value'])
if not result:
return self.pin_not_found()
return self.response(HTTP_MANAGER.read_one(pin_num), 200)
|
Add new fields to response
|
Add new fields to response
|
Python
|
mit
|
projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server
|
python
|
## Code Before:
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinHttpManager
from pi_gpio import app
HTTP_MANAGER = PinHttpManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer
}
def pin_not_found(self):
return {'message': 'Pin not found'}, 404
class PinList(Pin):
def get(self):
result = HTTP_MANAGER.read_all()
return self.response(result, 200)
class PinDetail(Pin):
def get(self, pin_num):
result = HTTP_MANAGER.read_one(pin_num)
if not result:
return self.pin_not_found()
return self.response(result, 200)
def patch(self, pin_num):
self.parser.add_argument('value', type=int)
args = self.parser.parse_args()
result = HTTP_MANAGER.update_value(pin_num, args['value'])
if not result:
return self.pin_not_found()
return self.response(HTTP_MANAGER.read_one(pin_num), 200)
## Instruction:
Add new fields to response
## Code After:
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinHttpManager
from pi_gpio import app
HTTP_MANAGER = PinHttpManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer,
"resistor": fields.String,
"initial": fields.String,
"event": fields.String,
"bounce": fields.Integer
}
def pin_not_found(self):
return {'message': 'Pin not found'}, 404
class PinList(Pin):
def get(self):
result = HTTP_MANAGER.read_all()
return self.response(result, 200)
class PinDetail(Pin):
def get(self, pin_num):
result = HTTP_MANAGER.read_one(pin_num)
if not result:
return self.pin_not_found()
return self.response(result, 200)
def patch(self, pin_num):
self.parser.add_argument('value', type=int)
args = self.parser.parse_args()
result = HTTP_MANAGER.update_value(pin_num, args['value'])
if not result:
return self.pin_not_found()
return self.response(HTTP_MANAGER.read_one(pin_num), 200)
|
// ... existing code ...
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer,
"resistor": fields.String,
"initial": fields.String,
"event": fields.String,
"bounce": fields.Integer
}
def pin_not_found(self):
// ... rest of the code ...
|
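A hedged illustration of how the extended fields dict marshals a raw pin record into a response; every value below is invented, and the modern `flask_restful` import path is used rather than the older `flask.ext.restful` seen in the record.

```python
from flask_restful import fields, marshal

pin_fields = {
    "num": fields.Integer,
    "mode": fields.String,
    "value": fields.Integer,
    "resistor": fields.String,
    "initial": fields.String,
    "event": fields.String,
    "bounce": fields.Integer,
}

raw = {"num": 18, "mode": "OUT", "value": 1, "resistor": "PUD_DOWN",
       "initial": "LOW", "event": "RISING", "bounce": 200}
print(marshal(raw, pin_fields))
```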
74976f231130b843f555a8f73df18ffb4d89639e
|
test/CodeGen/debug-info-line2.c
|
test/CodeGen/debug-info-line2.c
|
// RUN: %clang_cc1 -triple x86_64-darwin-apple -g -emit-llvm -o - %s | FileCheck %s
// Radar 9199234
int bar();
int foo(int i) {
int j = 0;
if (i) {
j = bar();
//CHECK: store i32
//CHECK-NOT: br label %{{%[a-zA-Z0-9\.]+}}, !dbg
}
else
{
j = bar() + 2;
}
return j;
}
|
// RUN: %clang_cc1 -triple x86_64-darwin-apple -g -emit-llvm -o - %s | FileCheck %s
// Radar 9199234
int bar();
int foo(int i) {
int j = 0;
if (i) {
j = bar();
}
else
{
//CHECK: store i32 %add
//CHECK-NOT: br label %{{[a-zA-Z0-9\.]+}}, !dbg
j = bar() + 2;
}
return j;
}
|
Fix regexp for this test to properly check.
|
Fix regexp for this test to properly check.
Patch by Eli Bendersky.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@160385 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -triple x86_64-darwin-apple -g -emit-llvm -o - %s | FileCheck %s
// Radar 9199234
int bar();
int foo(int i) {
int j = 0;
if (i) {
j = bar();
//CHECK: store i32
//CHECK-NOT: br label %{{%[a-zA-Z0-9\.]+}}, !dbg
}
else
{
j = bar() + 2;
}
return j;
}
## Instruction:
Fix regexp for this test to properly check.
Patch by Eli Bendersky.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@160385 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -triple x86_64-darwin-apple -g -emit-llvm -o - %s | FileCheck %s
// Radar 9199234
int bar();
int foo(int i) {
int j = 0;
if (i) {
j = bar();
}
else
{
//CHECK: store i32 %add
//CHECK-NOT: br label %{{[a-zA-Z0-9\.]+}}, !dbg
j = bar() + 2;
}
return j;
}
|
# ... existing code ...
int j = 0;
if (i) {
j = bar();
}
else
{
//CHECK: store i32 %add
//CHECK-NOT: br label %{{[a-zA-Z0-9\.]+}}, !dbg
j = bar() + 2;
}
return j;
# ... rest of the code ...
|
b6b9a926704ffe570bd4cedf6cabd9920dc41cad
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
setup(
name='yamlicious',
packages=find_packages(),
scripts=[os.path.join('bin', p) for p in os.listdir('bin')],
)
|
from setuptools import setup, find_packages
import os
setup(
name='yamlicious',
packages=find_packages(),
scripts=[os.path.join('bin', p) for p in os.listdir('bin')],
install_requires=[
'pyyaml',
]
)
|
Add YAML as a dep.
|
Add YAML as a dep.
|
Python
|
bsd-2-clause
|
derrley/yamlicious,derrley/yamlicious
|
python
|
## Code Before:
from setuptools import setup, find_packages
import os
setup(
name='yamlicious',
packages=find_packages(),
scripts=[os.path.join('bin', p) for p in os.listdir('bin')],
)
## Instruction:
Add YAML as a dep.
## Code After:
from setuptools import setup, find_packages
import os
setup(
name='yamlicious',
packages=find_packages(),
scripts=[os.path.join('bin', p) for p in os.listdir('bin')],
install_requires=[
'pyyaml',
]
)
|
...
name='yamlicious',
packages=find_packages(),
scripts=[os.path.join('bin', p) for p in os.listdir('bin')],
install_requires=[
'pyyaml',
]
)
...
|
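A minimal sketch of the kind of pyyaml call a YAML-loading tool depends on, which is why the record above adds it to install_requires; the document string is invented.

```python
import yaml

doc = yaml.safe_load("name: example\nvalues:\n  - 1\n  - 2\n")
print(doc["values"])   # [1, 2]
```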
260c45281a6d645e1f4401eef8cbe0d473cf7dfa
|
app/src/main/java/it/inaf/android/DateFormatter.java
|
app/src/main/java/it/inaf/android/DateFormatter.java
|
/* Copyright (c) 2014 Andrea Zoli. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file. */
package it.inaf.android;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;
public class DateFormatter {
public static String format(String date)
{
SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z");
java.util.Date tmpDate = null;
try {
tmpDate = format.parse(date);
} catch(ParseException e) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
try {
tmpDate = format.parse(date);
} catch (ParseException e1) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
try {
tmpDate = format.parse(date);
} catch (ParseException e2) {
e2.printStackTrace();
}
}
}
SimpleDateFormat postFormater = new SimpleDateFormat("dd.MM.yyyy", Locale.ITALY);
return postFormater.format(tmpDate);
}
}
|
/* Copyright (c) 2014 Andrea Zoli. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file. */
package it.inaf.android;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;
public class DateFormatter {
public static String format(String date)
{
SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z", Locale.UK);
java.util.Date tmpDate = null;
try {
tmpDate = format.parse(date);
} catch(ParseException e) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.UK);
try {
tmpDate = format.parse(date);
} catch (ParseException e1) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.UK);
try {
tmpDate = format.parse(date);
} catch (ParseException e2) {
e2.printStackTrace();
}
}
}
SimpleDateFormat postFormater = new SimpleDateFormat("dd.MM.yyyy", Locale.ITALY);
return postFormater.format(tmpDate);
}
}
|
Use Locale.UK while parsing dates.
|
Use Locale.UK while parsing dates.
|
Java
|
bsd-3-clause
|
mediainaf/AppINAFAndroid
|
java
|
## Code Before:
/* Copyright (c) 2014 Andrea Zoli. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file. */
package it.inaf.android;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;
public class DateFormatter {
public static String format(String date)
{
SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z");
java.util.Date tmpDate = null;
try {
tmpDate = format.parse(date);
} catch(ParseException e) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
try {
tmpDate = format.parse(date);
} catch (ParseException e1) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
try {
tmpDate = format.parse(date);
} catch (ParseException e2) {
e2.printStackTrace();
}
}
}
SimpleDateFormat postFormater = new SimpleDateFormat("dd.MM.yyyy", Locale.ITALY);
return postFormater.format(tmpDate);
}
}
## Instruction:
Use Locale.UK while parsing dates.
## Code After:
/* Copyright (c) 2014 Andrea Zoli. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file. */
package it.inaf.android;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Locale;
public class DateFormatter {
public static String format(String date)
{
SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z", Locale.UK);
java.util.Date tmpDate = null;
try {
tmpDate = format.parse(date);
} catch(ParseException e) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.UK);
try {
tmpDate = format.parse(date);
} catch (ParseException e1) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.UK);
try {
tmpDate = format.parse(date);
} catch (ParseException e2) {
e2.printStackTrace();
}
}
}
SimpleDateFormat postFormater = new SimpleDateFormat("dd.MM.yyyy", Locale.ITALY);
return postFormater.format(tmpDate);
}
}
|
...
public class DateFormatter {
public static String format(String date)
{
SimpleDateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z", Locale.UK);
java.util.Date tmpDate = null;
try {
tmpDate = format.parse(date);
} catch(ParseException e) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.UK);
try {
tmpDate = format.parse(date);
} catch (ParseException e1) {
format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.UK);
try {
tmpDate = format.parse(date);
} catch (ParseException e2) {
...
|
9c3dcb096a912dc75d6ad36fc2dabaebeb1b1e22
|
rsa-algorithm/src/Encryption.java
|
rsa-algorithm/src/Encryption.java
|
/**
* Project RSA Algorithm.
* Copyright Michał Szczygieł.
* Created at Feb 24, 2014.
*/
/**
* TODO COMMENTS MISSING!
*
* @author Michał Szczygieł <[email protected]>
*
*/
public interface Encryption {
}
|
import java.math.BigInteger;
/**
* Project RSA Algorithm.
* Copyright Michał Szczygieł.
* Created at Feb 24, 2014.
*/
/**
* This interface describe the basic operation with encryption algorithm.
*
* @author Michał Szczygieł <[email protected]>
*
*/
public interface Encryption {
/**
* This method makes operation to achieve decrypted message.
*
* @param valueToDecrypt
* The value of which will be performed the decryption process.
* @return Decrypted value.
*/
public BigInteger decrypt(BigInteger valueToDecrypt);
/**
* This method makes operation to create encrypted message.
*
* @param valueToEncrypt
* The value of which will be performed the encryption process
* @return Encrypted value.
*/
public BigInteger encrypt(BigInteger valueToEncrypt);
}
|
Define interface for encryption operations
|
Define interface for encryption operations
|
Java
|
mit
|
M4GiK/tosi-projects,M4GiK/tosi-projects
|
java
|
## Code Before:
/**
* Project RSA Algorithm.
* Copyright Michał Szczygieł.
* Created at Feb 24, 2014.
*/
/**
* TODO COMMENTS MISSING!
*
* @author Michał Szczygieł <[email protected]>
*
*/
public interface Encryption {
}
## Instruction:
Define interface for encryption operations
## Code After:
import java.math.BigInteger;
/**
* Project RSA Algorithm.
* Copyright Michał Szczygieł.
* Created at Feb 24, 2014.
*/
/**
* This interface describe the basic operation with encryption algorithm.
*
* @author Michał Szczygieł <[email protected]>
*
*/
public interface Encryption {
/**
* This method makes operation to achieve decrypted message.
*
* @param valueToDecrypt
* The value of which will be performed the decryption process.
* @return Decrypted value.
*/
public BigInteger decrypt(BigInteger valueToDecrypt);
/**
* This method makes operation to create encrypted message.
*
* @param valueToEncrypt
* The value of which will be performed the encryption process
* @return Encrypted value.
*/
public BigInteger encrypt(BigInteger valueToEncrypt);
}
|
...
import java.math.BigInteger;
/**
* Project RSA Algorithm.
* Copyright Michał Szczygieł.
...
*/
/**
* This interface describe the basic operation with encryption algorithm.
*
* @author Michał Szczygieł <[email protected]>
*
...
*/
public interface Encryption {
/**
* This method makes operation to achieve decrypted message.
*
* @param valueToDecrypt
* The value of which will be performed the decryption process.
* @return Decrypted value.
*/
public BigInteger decrypt(BigInteger valueToDecrypt);
/**
* This method makes operation to create encrypted message.
*
* @param valueToEncrypt
* The value of which will be performed the encryption process
* @return Encrypted value.
*/
public BigInteger encrypt(BigInteger valueToEncrypt);
}
...
|