commit
stringlengths 40
40
| old_file
stringlengths 4
234
| new_file
stringlengths 4
234
| old_contents
stringlengths 10
3.01k
| new_contents
stringlengths 19
3.38k
| subject
stringlengths 16
736
| message
stringlengths 17
2.63k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
82.6k
| config
stringclasses 4
values | content
stringlengths 134
4.41k
| fuzzy_diff
stringlengths 29
3.44k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
09dd2e16ef29b6c79ee344a55bea5bd0e59c7a59
|
fireplace/cards/gvg/shaman.py
|
fireplace/cards/gvg/shaman.py
|
from ..utils import *
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
|
from ..utils import *
##
# Minions
# Vitality Totem
class GVG_039:
OWN_TURN_END = [Heal(FRIENDLY_HERO, 4)]
# Siltfin Spiritwalker
class GVG_040:
def OWN_MINION_DESTROY(self, minion):
if minion.race == Race.MURLOC:
return [Draw(CONTROLLER, 1)]
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
# Crackle
class GVG_038:
def action(self, target):
return [Hit(TARGET, random.randint(3, 6))]
##
# Weapons
# Powermace
class GVG_036:
action = [Buff(RANDOM(FRIENDLY_MINIONS + MECH), "GVG_036e")]
|
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
|
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
|
Python
|
agpl-3.0
|
oftc-ftw/fireplace,Meerkov/fireplace,Ragowit/fireplace,butozerca/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,amw2104/fireplace,oftc-ftw/fireplace,NightKev/fireplace,butozerca/fireplace,amw2104/fireplace,beheh/fireplace,liujimj/fireplace,jleclanche/fireplace,Ragowit/fireplace,Meerkov/fireplace,liujimj/fireplace
|
python
|
## Code Before:
from ..utils import *
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
## Instruction:
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
## Code After:
from ..utils import *
##
# Minions
# Vitality Totem
class GVG_039:
OWN_TURN_END = [Heal(FRIENDLY_HERO, 4)]
# Siltfin Spiritwalker
class GVG_040:
def OWN_MINION_DESTROY(self, minion):
if minion.race == Race.MURLOC:
return [Draw(CONTROLLER, 1)]
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
# Crackle
class GVG_038:
def action(self, target):
return [Hit(TARGET, random.randint(3, 6))]
##
# Weapons
# Powermace
class GVG_036:
action = [Buff(RANDOM(FRIENDLY_MINIONS + MECH), "GVG_036e")]
|
...
from ..utils import *
##
# Minions
# Vitality Totem
class GVG_039:
OWN_TURN_END = [Heal(FRIENDLY_HERO, 4)]
# Siltfin Spiritwalker
class GVG_040:
def OWN_MINION_DESTROY(self, minion):
if minion.race == Race.MURLOC:
return [Draw(CONTROLLER, 1)]
##
...
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
# Crackle
class GVG_038:
def action(self, target):
return [Hit(TARGET, random.randint(3, 6))]
##
# Weapons
# Powermace
class GVG_036:
action = [Buff(RANDOM(FRIENDLY_MINIONS + MECH), "GVG_036e")]
...
|
0b8a1228b773bc4e9a865a913a231f7e0d1e7a15
|
platform/shared/common/stat.h
|
platform/shared/common/stat.h
|
/*
#if defined(__APPLE__) && !(defined(__DARWIN_ONLY_64_BIT_INO_T) && ___DARWIN_ONLY_64_BIT_INO_T)
#define __DARWIN_ONLY_64_BIT_INO_T 1
#endif
*/
#include <sys/stat.h>
|
/* Simulator 3.2 or less */
#define RHO_IPHONE_SIMULATOR_3
#endif
#ifdef RHO_IPHONE_SIMULATOR_3
#ifdef stat
#undef stat
#endif
#ifdef lstat
#undef lstat
#endif
#ifdef fstat
#undef fstat
#endif
#endif
#include <sys/stat.h>
#ifdef RHO_IPHONE_SIMULATOR_3
/*
* map stat functions and structure to theirs 64-bit analogues to be binary
* compatible with iPhone 4 x86/x86_64 application - in iPhone 4 SDK stat
* become 64-bit only so enabling such mapping we could run applications built
* with 3.x SDK on iPhone 4 simulator
* This is not required for iPhone devices - there stat was always 64-bit.
*/
#define stat stat64
#define lstat lstat64
#define fstat fstat64
#endif
|
Make iPhone simulator build binary compatible between 3.x and 4.x
|
Make iPhone simulator build binary compatible between 3.x and 4.x
|
C
|
mit
|
pslgoh/rhodes,tauplatform/tau,watusi/rhodes,tauplatform/tau,tauplatform/tau,tauplatform/tau,tauplatform/tau,watusi/rhodes,rhomobile/rhodes,rhomobile/rhodes,tauplatform/tau,tauplatform/tau,watusi/rhodes,pslgoh/rhodes,pslgoh/rhodes,tauplatform/tau,rhomobile/rhodes,rhomobile/rhodes,watusi/rhodes,pslgoh/rhodes,watusi/rhodes,pslgoh/rhodes,watusi/rhodes,rhomobile/rhodes,rhomobile/rhodes,tauplatform/tau,watusi/rhodes,watusi/rhodes,rhomobile/rhodes,pslgoh/rhodes,watusi/rhodes,pslgoh/rhodes,pslgoh/rhodes,tauplatform/tau,rhomobile/rhodes,watusi/rhodes,pslgoh/rhodes,rhomobile/rhodes,rhomobile/rhodes,pslgoh/rhodes
|
c
|
## Code Before:
/*
#if defined(__APPLE__) && !(defined(__DARWIN_ONLY_64_BIT_INO_T) && ___DARWIN_ONLY_64_BIT_INO_T)
#define __DARWIN_ONLY_64_BIT_INO_T 1
#endif
*/
#include <sys/stat.h>
## Instruction:
Make iPhone simulator build binary compatible between 3.x and 4.x
## Code After:
/* Simulator 3.2 or less */
#define RHO_IPHONE_SIMULATOR_3
#endif
#ifdef RHO_IPHONE_SIMULATOR_3
#ifdef stat
#undef stat
#endif
#ifdef lstat
#undef lstat
#endif
#ifdef fstat
#undef fstat
#endif
#endif
#include <sys/stat.h>
#ifdef RHO_IPHONE_SIMULATOR_3
/*
* map stat functions and structure to theirs 64-bit analogues to be binary
* compatible with iPhone 4 x86/x86_64 application - in iPhone 4 SDK stat
* become 64-bit only so enabling such mapping we could run applications built
* with 3.x SDK on iPhone 4 simulator
* This is not required for iPhone devices - there stat was always 64-bit.
*/
#define stat stat64
#define lstat lstat64
#define fstat fstat64
#endif
|
...
/* Simulator 3.2 or less */
#define RHO_IPHONE_SIMULATOR_3
#endif
#ifdef RHO_IPHONE_SIMULATOR_3
#ifdef stat
#undef stat
#endif
#ifdef lstat
#undef lstat
#endif
#ifdef fstat
#undef fstat
#endif
#endif
#include <sys/stat.h>
#ifdef RHO_IPHONE_SIMULATOR_3
/*
* map stat functions and structure to theirs 64-bit analogues to be binary
* compatible with iPhone 4 x86/x86_64 application - in iPhone 4 SDK stat
* become 64-bit only so enabling such mapping we could run applications built
* with 3.x SDK on iPhone 4 simulator
* This is not required for iPhone devices - there stat was always 64-bit.
*/
#define stat stat64
#define lstat lstat64
#define fstat fstat64
#endif
...
|
79996420e775994b53d88f5b7c9ad21106a77831
|
examples/tests/test_examples.py
|
examples/tests/test_examples.py
|
import pytest
from examples.gbest_pso import main as gbest
from examples.lbest_pso import main as lbest
from examples.gc_pso import main as gc
from examples.pso_optimizer import main as pso_optimizer
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gbest_pso(dimension, iterations):
gbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_lbest_pso(dimension, iterations):
lbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
gc(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
pso_optimizer(dimension, iterations)
|
import pytest
from examples.gbest_pso import main as gbest
from examples.gc_pso import main as gc
from examples.lbest_pso import main as lbest
from examples.pso_optimizer import main as pso_optimizer
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gbest_pso(dimension, iterations):
gbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_lbest_pso(dimension, iterations):
lbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
gc(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
pso_optimizer(dimension, iterations)
|
Add license header and file documentation
|
Add license header and file documentation
|
Python
|
apache-2.0
|
avanwyk/cipy
|
python
|
## Code Before:
import pytest
from examples.gbest_pso import main as gbest
from examples.lbest_pso import main as lbest
from examples.gc_pso import main as gc
from examples.pso_optimizer import main as pso_optimizer
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gbest_pso(dimension, iterations):
gbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_lbest_pso(dimension, iterations):
lbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
gc(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
pso_optimizer(dimension, iterations)
## Instruction:
Add license header and file documentation
## Code After:
import pytest
from examples.gbest_pso import main as gbest
from examples.gc_pso import main as gc
from examples.lbest_pso import main as lbest
from examples.pso_optimizer import main as pso_optimizer
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gbest_pso(dimension, iterations):
gbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_lbest_pso(dimension, iterations):
lbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
gc(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
pso_optimizer(dimension, iterations)
|
# ... existing code ...
import pytest
from examples.gbest_pso import main as gbest
from examples.gc_pso import main as gc
from examples.lbest_pso import main as lbest
from examples.pso_optimizer import main as pso_optimizer
# ... rest of the code ...
|
20ceaae7f4d601c811db7140f43d209b6cdf9fc7
|
storage-controller-lib/src/main/java/enmasse/storage/controller/admin/FlavorManager.java
|
storage-controller-lib/src/main/java/enmasse/storage/controller/admin/FlavorManager.java
|
package enmasse.storage.controller.admin;
import com.fasterxml.jackson.databind.JsonNode;
import enmasse.storage.controller.model.FlavorConfig;
import enmasse.storage.controller.model.parser.FlavorConfigParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author Ulf Lilleengen
*/
public class FlavorManager implements ConfigManager, FlavorRepository {
private volatile Map<String, FlavorConfig> flavorMap = Collections.emptyMap();
private final FlavorConfigParser parser = new FlavorConfigParser();
@Override
public FlavorConfig getFlavor(String flavorName) {
FlavorConfig flavor = flavorMap.get(flavorName);
if (flavor == null) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
throw new IllegalArgumentException(String.format("No flavor with name '%s' exists, have [%s]", flavorName, flavors));
}
return flavor;
}
@Override
public FlavorConfig getDefaultFlavor() {
return new FlavorConfig.Builder().build();
}
@Override
public void configUpdated(JsonNode jsonConfig) throws IOException {
configUpdated(parser.parse(jsonConfig));
}
public void configUpdated(Map<String, FlavorConfig> flavorMap) {
this.flavorMap = flavorMap;
}
}
|
package enmasse.storage.controller.admin;
import com.fasterxml.jackson.databind.JsonNode;
import enmasse.storage.controller.model.FlavorConfig;
import enmasse.storage.controller.model.parser.FlavorConfigParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
* @author Ulf Lilleengen
*/
public class FlavorManager implements ConfigManager, FlavorRepository {
private static final Logger log = Logger.getLogger(FlavorManager.class.getName());
private volatile Map<String, FlavorConfig> flavorMap = Collections.emptyMap();
private final FlavorConfigParser parser = new FlavorConfigParser();
@Override
public FlavorConfig getFlavor(String flavorName) {
FlavorConfig flavor = flavorMap.get(flavorName);
if (flavor == null) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
throw new IllegalArgumentException(String.format("No flavor with name '%s' exists, have [%s]", flavorName, flavors));
}
return flavor;
}
@Override
public FlavorConfig getDefaultFlavor() {
return new FlavorConfig.Builder().build();
}
@Override
public void configUpdated(JsonNode jsonConfig) throws IOException {
configUpdated(parser.parse(jsonConfig));
}
public void configUpdated(Map<String, FlavorConfig> flavorMap) {
this.flavorMap = flavorMap;
if (log.isLoggable(Level.INFO)) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
log.log(Level.INFO, String.format("Got new set of flavors: [%s]", flavors));
}
}
}
|
Add logging when flavors are updated
|
Add logging when flavors are updated
|
Java
|
apache-2.0
|
jenmalloy/enmasse,jenmalloy/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse
|
java
|
## Code Before:
package enmasse.storage.controller.admin;
import com.fasterxml.jackson.databind.JsonNode;
import enmasse.storage.controller.model.FlavorConfig;
import enmasse.storage.controller.model.parser.FlavorConfigParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author Ulf Lilleengen
*/
public class FlavorManager implements ConfigManager, FlavorRepository {
private volatile Map<String, FlavorConfig> flavorMap = Collections.emptyMap();
private final FlavorConfigParser parser = new FlavorConfigParser();
@Override
public FlavorConfig getFlavor(String flavorName) {
FlavorConfig flavor = flavorMap.get(flavorName);
if (flavor == null) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
throw new IllegalArgumentException(String.format("No flavor with name '%s' exists, have [%s]", flavorName, flavors));
}
return flavor;
}
@Override
public FlavorConfig getDefaultFlavor() {
return new FlavorConfig.Builder().build();
}
@Override
public void configUpdated(JsonNode jsonConfig) throws IOException {
configUpdated(parser.parse(jsonConfig));
}
public void configUpdated(Map<String, FlavorConfig> flavorMap) {
this.flavorMap = flavorMap;
}
}
## Instruction:
Add logging when flavors are updated
## Code After:
package enmasse.storage.controller.admin;
import com.fasterxml.jackson.databind.JsonNode;
import enmasse.storage.controller.model.FlavorConfig;
import enmasse.storage.controller.model.parser.FlavorConfigParser;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
* @author Ulf Lilleengen
*/
public class FlavorManager implements ConfigManager, FlavorRepository {
private static final Logger log = Logger.getLogger(FlavorManager.class.getName());
private volatile Map<String, FlavorConfig> flavorMap = Collections.emptyMap();
private final FlavorConfigParser parser = new FlavorConfigParser();
@Override
public FlavorConfig getFlavor(String flavorName) {
FlavorConfig flavor = flavorMap.get(flavorName);
if (flavor == null) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
throw new IllegalArgumentException(String.format("No flavor with name '%s' exists, have [%s]", flavorName, flavors));
}
return flavor;
}
@Override
public FlavorConfig getDefaultFlavor() {
return new FlavorConfig.Builder().build();
}
@Override
public void configUpdated(JsonNode jsonConfig) throws IOException {
configUpdated(parser.parse(jsonConfig));
}
public void configUpdated(Map<String, FlavorConfig> flavorMap) {
this.flavorMap = flavorMap;
if (log.isLoggable(Level.INFO)) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
log.log(Level.INFO, String.format("Got new set of flavors: [%s]", flavors));
}
}
}
|
# ... existing code ...
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
# ... modified code ...
* @author Ulf Lilleengen
*/
public class FlavorManager implements ConfigManager, FlavorRepository {
private static final Logger log = Logger.getLogger(FlavorManager.class.getName());
private volatile Map<String, FlavorConfig> flavorMap = Collections.emptyMap();
private final FlavorConfigParser parser = new FlavorConfigParser();
...
public void configUpdated(Map<String, FlavorConfig> flavorMap) {
this.flavorMap = flavorMap;
if (log.isLoggable(Level.INFO)) {
String flavors = flavorMap.keySet().stream().collect(Collectors.joining(","));
log.log(Level.INFO, String.format("Got new set of flavors: [%s]", flavors));
}
}
}
# ... rest of the code ...
|
6989e6b2308cbe496857b5f911c136fcf3043444
|
zeus/api/resources/user_token.py
|
zeus/api/resources/user_token.py
|
from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
|
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
|
Fix user token endpoint authorization
|
fix(token): Fix user token endpoint authorization
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
python
|
## Code Before:
from flask import Response
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def dispatch_request(self, *args, **kwargs) -> Response:
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
return Resource.dispatch_request(self, user, *args, **kwargs)
def get(self, user):
"""
Return the API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self, user):
"""
Create a new API token for the user.
"""
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
## Instruction:
fix(token): Fix user token endpoint authorization
## Code After:
from sqlalchemy.exc import IntegrityError
from zeus import auth
from zeus.config import db
from zeus.models import UserApiToken
from .base import Resource
from ..schemas import TokenSchema
token_schema = TokenSchema(strict=True)
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
if token:
token.key = UserApiToken.generate_token()
else:
token = UserApiToken(user=user)
try:
db.session.add(token)
db.session.commit()
except IntegrityError:
db.session.rollback()
return self.respond(status=422)
return self.respond_with_schema(token_schema, token)
|
// ... existing code ...
from sqlalchemy.exc import IntegrityError
from zeus import auth
// ... modified code ...
class UserTokenResource(Resource):
def get(self):
"""
Return the API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
...
return self.respond_with_schema(token_schema, token)
def post(self):
"""
Create a new API token for the user.
"""
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
token = UserApiToken.query \
.filter(UserApiToken.user == user) \
.one_or_none()
// ... rest of the code ...
|
66ee767857cad09de20744e50e11c923cd4f1db9
|
src/test/java/org/projectodd/nodej/NodejTestSupport.java
|
src/test/java/org/projectodd/nodej/NodejTestSupport.java
|
package org.projectodd.nodej;
import org.dynjs.runtime.DynJS;
import org.dynjs.runtime.ExecutionContext;
import org.dynjs.runtime.GlobalObject;
import org.junit.Before;
public class NodejTestSupport {
protected DynJS runtime;
protected ExecutionContext context;
protected String[] defaultArgs = { "node", "somearg" };
@Before
public void setUp() {
System.setProperty("dynjs.require.path", System.getProperty("user.dir") + "/src/main/javascript/node/lib");
runtime = new DynJS();
context = runtime.getExecutionContext();
GlobalObject globalObject = context.getGlobalObject();
globalObject.defineGlobalProperty("process", new Process(globalObject, defaultArgs));
}
}
|
package org.projectodd.nodej;
import org.dynjs.runtime.DynJS;
import org.dynjs.runtime.ExecutionContext;
import org.junit.Before;
public class NodejTestSupport {
protected DynJS runtime;
protected ExecutionContext context;
protected String[] defaultArgs = { "node", "somearg" };
@Before
public void setUp() {
System.setProperty("dynjs.require.path", System.getProperty("user.dir") + "/src/main/javascript/node/lib");
runtime = new DynJS();
context = runtime.getExecutionContext();
Node node = new Node(defaultArgs);
node.start(context);
}
}
|
Initialize a Node(), which should set up process for us
|
Initialize a Node(), which should set up process for us
|
Java
|
apache-2.0
|
nodyn/nodyn,tony--/nodyn,nodyn/nodyn,nodyn/nodyn,tony--/nodyn,tony--/nodyn,dherges/nodyn,dherges/nodyn,dherges/nodyn
|
java
|
## Code Before:
package org.projectodd.nodej;
import org.dynjs.runtime.DynJS;
import org.dynjs.runtime.ExecutionContext;
import org.dynjs.runtime.GlobalObject;
import org.junit.Before;
public class NodejTestSupport {
protected DynJS runtime;
protected ExecutionContext context;
protected String[] defaultArgs = { "node", "somearg" };
@Before
public void setUp() {
System.setProperty("dynjs.require.path", System.getProperty("user.dir") + "/src/main/javascript/node/lib");
runtime = new DynJS();
context = runtime.getExecutionContext();
GlobalObject globalObject = context.getGlobalObject();
globalObject.defineGlobalProperty("process", new Process(globalObject, defaultArgs));
}
}
## Instruction:
Initialize a Node(), which should set up process for us
## Code After:
package org.projectodd.nodej;
import org.dynjs.runtime.DynJS;
import org.dynjs.runtime.ExecutionContext;
import org.junit.Before;
public class NodejTestSupport {
protected DynJS runtime;
protected ExecutionContext context;
protected String[] defaultArgs = { "node", "somearg" };
@Before
public void setUp() {
System.setProperty("dynjs.require.path", System.getProperty("user.dir") + "/src/main/javascript/node/lib");
runtime = new DynJS();
context = runtime.getExecutionContext();
Node node = new Node(defaultArgs);
node.start(context);
}
}
|
# ... existing code ...
import org.dynjs.runtime.DynJS;
import org.dynjs.runtime.ExecutionContext;
import org.junit.Before;
public class NodejTestSupport {
# ... modified code ...
System.setProperty("dynjs.require.path", System.getProperty("user.dir") + "/src/main/javascript/node/lib");
runtime = new DynJS();
context = runtime.getExecutionContext();
Node node = new Node(defaultArgs);
node.start(context);
}
}
# ... rest of the code ...
|
a275068193c87c5a27758c17d7699e963a0bdfa8
|
llvmpy/src/Support/FormattedStream.py
|
llvmpy/src/Support/FormattedStream.py
|
from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
new = Constructor(ref(raw_ostream), cast(bool, Bool))
|
from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
_new = Constructor(ref(raw_ostream), cast(bool, Bool))
@CustomPythonStaticMethod
def new(stream, destroy=False):
inst = formatted_raw_ostream._new(stream, destroy)
inst.__underlying_stream = stream # to prevent it being freed first
return inst
|
Fix formatted_raw_ostream ownership error with the underlying stream.
|
Fix formatted_raw_ostream ownership error with the underlying stream.
|
Python
|
bsd-3-clause
|
llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy
|
python
|
## Code Before:
from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
new = Constructor(ref(raw_ostream), cast(bool, Bool))
## Instruction:
Fix formatted_raw_ostream ownership error with the underlying stream.
## Code After:
from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
_new = Constructor(ref(raw_ostream), cast(bool, Bool))
@CustomPythonStaticMethod
def new(stream, destroy=False):
inst = formatted_raw_ostream._new(stream, destroy)
inst.__underlying_stream = stream # to prevent it being freed first
return inst
|
...
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
_new = Constructor(ref(raw_ostream), cast(bool, Bool))
@CustomPythonStaticMethod
def new(stream, destroy=False):
inst = formatted_raw_ostream._new(stream, destroy)
inst.__underlying_stream = stream # to prevent it being freed first
return inst
...
|
b9654ffbbd1c2057d1ff377a0190b115f568d080
|
knights/defaulttags.py
|
knights/defaulttags.py
|
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
|
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
Rewrite 'now' tag to use BasicNode
|
Rewrite 'now' tag to use BasicNode
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
python
|
## Code Before:
from .library import Library
import datetime
register = Library()
@register.tag(name='now')
def now(parser, token):
args, kwargs = parser.parse_args(token)
def _now(context):
a, k = parser.resolve_args(context, args, kwargs)
val = datetime.datetime.now()
return val.strftime(a[0])
return _now
## Instruction:
Rewrite 'now' tag to use BasicNode
## Code After:
from .library import Library
from .parse import BasicNode
import datetime
register = Library()
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
|
...
from .library import Library
from .parse import BasicNode
import datetime
...
@register.tag(name='now')
class NowNode(BasicNode):
def render(self, fmt):
val = datetime.datetime.now()
return val.strftime(fmt)
...
|
776c7a86607d2d28990f6877716bfea0bd61bc3c
|
src/main/java/com/telus/training/ex1/ClockNeedles.java
|
src/main/java/com/telus/training/ex1/ClockNeedles.java
|
package com.telus.training.ex1;
public class ClockNeedles {
public double calculateAngle(int hours, int minutes) {
double hoursAngle = hours*30;
double minutesAngle = minutes*6;
double angle = Math.abs(hoursAngle - minutesAngle);
if(angle > 180.0) angle = angle - 180.0;
return angle;
}
}
|
package com.telus.training.ex1;
public class ClockNeedles {
public double calculateAngleBetween(int hours, int minutes) {
double hoursAngle = convertHoursToAngle(hours);
double minutesAngle = convertMinutesToAngle(minutes);
double angle = Math.abs(hoursAngle - minutesAngle);
return (angle > 180.0) ? (angle - 180.0) : angle;
}
private int convertMinutesToAngle(int minutes) {
if (minutes < 0 || minutes > 60) {
throw new IllegalArgumentException("Minutes must be between 0 and 60");
}
return minutes*6;
}
private int convertHoursToAngle(int hours) {
if(hours < 0 || hours > 12) {
throw new IllegalArgumentException("Hours must be between 0 and 12");
}
return hours*30;
}
}
|
Add checks for hours and minutes
|
Add checks for hours and minutes
|
Java
|
mit
|
tristanles/tdd-ex1-java
|
java
|
## Code Before:
package com.telus.training.ex1;
public class ClockNeedles {
public double calculateAngle(int hours, int minutes) {
double hoursAngle = hours*30;
double minutesAngle = minutes*6;
double angle = Math.abs(hoursAngle - minutesAngle);
if(angle > 180.0) angle = angle - 180.0;
return angle;
}
}
## Instruction:
Add checks for hours and minutes
## Code After:
package com.telus.training.ex1;
public class ClockNeedles {
public double calculateAngleBetween(int hours, int minutes) {
double hoursAngle = convertHoursToAngle(hours);
double minutesAngle = convertMinutesToAngle(minutes);
double angle = Math.abs(hoursAngle - minutesAngle);
return (angle > 180.0) ? (angle - 180.0) : angle;
}
private int convertMinutesToAngle(int minutes) {
if (minutes < 0 || minutes > 60) {
throw new IllegalArgumentException("Minutes must be between 0 and 60");
}
return minutes*6;
}
private int convertHoursToAngle(int hours) {
if(hours < 0 || hours > 12) {
throw new IllegalArgumentException("Hours must be between 0 and 12");
}
return hours*30;
}
}
|
...
public class ClockNeedles {
public double calculateAngleBetween(int hours, int minutes) {
double hoursAngle = convertHoursToAngle(hours);
double minutesAngle = convertMinutesToAngle(minutes);
double angle = Math.abs(hoursAngle - minutesAngle);
return (angle > 180.0) ? (angle - 180.0) : angle;
}
private int convertMinutesToAngle(int minutes) {
if (minutes < 0 || minutes > 60) {
throw new IllegalArgumentException("Minutes must be between 0 and 60");
}
return minutes*6;
}
private int convertHoursToAngle(int hours) {
if(hours < 0 || hours > 12) {
throw new IllegalArgumentException("Hours must be between 0 and 12");
}
return hours*30;
}
}
...
|
bf4af59b4a9d0637d3743b6b6ff0eaef18dbb902
|
flask_restplus/namespace.py
|
flask_restplus/namespace.py
|
from __future__ import unicode_literals
class ApiNamespace(object):
def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs):
self.api = api
self.name = name
self.path = path or ('/' + name)
self.description = description
self.resources = []
self.models = []
def add_resource(self, resource, *urls, **kwargs):
self.resources.append((resource, urls, kwargs))
self.api.add_resource(resource, *urls, namespace=self, **kwargs)
def route(self, *urls, **kwargs):
def wrapper(cls):
doc = kwargs.pop('doc', None)
if doc:
self.api._handle_api_doc(cls, doc)
self.add_resource(cls, *[self.path + url for url in urls], **kwargs)
return cls
return wrapper
|
from __future__ import unicode_literals
class ApiNamespace(object):
def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs):
self.api = api
self.name = name
self.path = path or ('/' + name)
self.description = description
self.resources = []
self.models = []
def add_resource(self, resource, *urls, **kwargs):
self.resources.append((resource, urls, kwargs))
self.api.add_resource(resource, *urls, namespace=self, **kwargs)
def route(self, *urls, **kwargs):
def wrapper(cls):
doc = kwargs.pop('doc', None)
if doc is not None:
self.api._handle_api_doc(cls, doc)
self.add_resource(cls, *[self.path + url for url in urls], **kwargs)
return cls
return wrapper
|
Hide resource if doc is False
|
Hide resource if doc is False
|
Python
|
mit
|
leiserfg/flask-restplus,luminusnetworks/flask-restplus,awiddersheim/flask-restplus,awiddersheim/flask-restplus,fixedd/flask-restplus,luminusnetworks/flask-restplus,fixedd/flask-restplus,leiserfg/flask-restplus
|
python
|
## Code Before:
from __future__ import unicode_literals
class ApiNamespace(object):
def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs):
self.api = api
self.name = name
self.path = path or ('/' + name)
self.description = description
self.resources = []
self.models = []
def add_resource(self, resource, *urls, **kwargs):
self.resources.append((resource, urls, kwargs))
self.api.add_resource(resource, *urls, namespace=self, **kwargs)
def route(self, *urls, **kwargs):
def wrapper(cls):
doc = kwargs.pop('doc', None)
if doc:
self.api._handle_api_doc(cls, doc)
self.add_resource(cls, *[self.path + url for url in urls], **kwargs)
return cls
return wrapper
## Instruction:
Hide resource if doc is False
## Code After:
from __future__ import unicode_literals
class ApiNamespace(object):
def __init__(self, api, name, description=None, endpoint=None, path=None, **kwargs):
self.api = api
self.name = name
self.path = path or ('/' + name)
self.description = description
self.resources = []
self.models = []
def add_resource(self, resource, *urls, **kwargs):
self.resources.append((resource, urls, kwargs))
self.api.add_resource(resource, *urls, namespace=self, **kwargs)
def route(self, *urls, **kwargs):
def wrapper(cls):
doc = kwargs.pop('doc', None)
if doc is not None:
self.api._handle_api_doc(cls, doc)
self.add_resource(cls, *[self.path + url for url in urls], **kwargs)
return cls
return wrapper
|
// ... existing code ...
def route(self, *urls, **kwargs):
def wrapper(cls):
doc = kwargs.pop('doc', None)
if doc is not None:
self.api._handle_api_doc(cls, doc)
self.add_resource(cls, *[self.path + url for url in urls], **kwargs)
return cls
// ... rest of the code ...
|
343e3bd0e16df1106d82fa6087a7247dc67bb52b
|
oslo_concurrency/_i18n.py
|
oslo_concurrency/_i18n.py
|
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.concurrency')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.concurrency')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
Drop use of namespaced oslo.i18n
|
Drop use of namespaced oslo.i18n
Related-blueprint: drop-namespace-packages
Change-Id: Ic8247cb896ba6337932d7a74618debd698584fa0
|
Python
|
apache-2.0
|
JioCloud/oslo.concurrency,openstack/oslo.concurrency,varunarya10/oslo.concurrency
|
python
|
## Code Before:
from oslo import i18n
_translators = i18n.TranslatorFactory(domain='oslo.concurrency')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
## Instruction:
Drop use of namespaced oslo.i18n
Related-blueprint: drop-namespace-packages
Change-Id: Ic8247cb896ba6337932d7a74618debd698584fa0
## Code After:
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.concurrency')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
...
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.concurrency')
# The primary translation function using the well-known name "_"
_ = _translators.primary
...
|
f00c7f3a976ba4790963a5701c5ce13f6dcd84fa
|
tests/test_funcmakers.py
|
tests/test_funcmakers.py
|
import inspect
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
Remove unused import from tests
|
Remove unused import from tests
|
Python
|
bsd-3-clause
|
Suor/funcy
|
python
|
## Code Before:
import inspect
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
## Instruction:
Remove unused import from tests
## Code After:
from collections import defaultdict
import pytest
from funcy.funcmakers import *
def test_callable():
assert make_func(lambda x: x + 42)(0) == 42
def test_int():
assert make_func(0)('abc') == 'a'
assert make_func(2)([1,2,3]) == 3
assert make_func(1)({1: 'a'}) == 'a'
with pytest.raises(IndexError): make_func(1)('a')
with pytest.raises(TypeError): make_func(1)(42)
def test_slice():
assert make_func(slice(1, None))('abc') == 'bc'
def test_str():
assert make_func('\d+')('ab42c') == '42'
assert make_func('\d+')('abc') is None
assert make_pred('\d+')('ab42c') is True
assert make_pred('\d+')('abc') is False
def test_dict():
assert make_func({1: 'a'})(1) == 'a'
with pytest.raises(KeyError): make_func({1: 'a'})(2)
d = defaultdict(int, a=42)
assert make_func(d)('a') == 42
assert make_func(d)('b') == 0
def test_set():
s = set([1,2,3])
assert make_func(s)(1) is True
assert make_func(s)(4) is False
|
# ... existing code ...
from collections import defaultdict
import pytest
# ... rest of the code ...
|
7a24f314c426e55735836dd2f805d9e0364dc871
|
tarbell/hooks.py
|
tarbell/hooks.py
|
hooks = {
'newproject': [], # (site)
'generate': [], # (site, dir, extra_context)
'publish': [], # (site, s3)
'install': [], # (site, project)
'preview': [], # (site)
'server_start': [], # (site)
'server_stop': [], # (site)
}
class register_hook(object):
"""
Register hook with @register_hook("EVENT") where EVENT is "newproject" etc.
"""
def __init__(self, event):
self.event = event
def __call__(self, f):
# Avoid weird duplication
names = ['{0}.{1}'.format(func.__module__, func.func_name) for func in hooks[self.event]]
if '{0}.{1}'.format(f.__module__, f.func_name) not in names:
hooks[self.event].append(f)
return f
|
hooks = {
'newproject': [], # (site)
'generate': [], # (site, dir, extra_context)
'publish': [], # (site, s3)
'install': [], # (site, project)
'preview': [], # (site)
'server_start': [], # (site)
'server_stop': [], # (site)
}
class register_hook(object):
"""
Register hook with @register_hook("EVENT") where EVENT is "newproject" etc.
"""
def __init__(self, event):
self.event = event
def __call__(self, f):
# Avoid weird duplication
names = ['{0}.{1}'.format(func.__module__, func.__name__) for func in hooks[self.event]]
if '{0}.{1}'.format(f.__module__, f.__name__) not in names:
hooks[self.event].append(f)
return f
|
Switch to Python 3-friendly `function.__name__`
|
Switch to Python 3-friendly `function.__name__`
|
Python
|
bsd-3-clause
|
tarbell-project/tarbell,eyeseast/tarbell,tarbell-project/tarbell,eyeseast/tarbell
|
python
|
## Code Before:
hooks = {
'newproject': [], # (site)
'generate': [], # (site, dir, extra_context)
'publish': [], # (site, s3)
'install': [], # (site, project)
'preview': [], # (site)
'server_start': [], # (site)
'server_stop': [], # (site)
}
class register_hook(object):
"""
Register hook with @register_hook("EVENT") where EVENT is "newproject" etc.
"""
def __init__(self, event):
self.event = event
def __call__(self, f):
# Avoid weird duplication
names = ['{0}.{1}'.format(func.__module__, func.func_name) for func in hooks[self.event]]
if '{0}.{1}'.format(f.__module__, f.func_name) not in names:
hooks[self.event].append(f)
return f
## Instruction:
Switch to Python 3-friendly `function.__name__`
## Code After:
hooks = {
'newproject': [], # (site)
'generate': [], # (site, dir, extra_context)
'publish': [], # (site, s3)
'install': [], # (site, project)
'preview': [], # (site)
'server_start': [], # (site)
'server_stop': [], # (site)
}
class register_hook(object):
"""
Register hook with @register_hook("EVENT") where EVENT is "newproject" etc.
"""
def __init__(self, event):
self.event = event
def __call__(self, f):
# Avoid weird duplication
names = ['{0}.{1}'.format(func.__module__, func.__name__) for func in hooks[self.event]]
if '{0}.{1}'.format(f.__module__, f.__name__) not in names:
hooks[self.event].append(f)
return f
|
...
def __call__(self, f):
# Avoid weird duplication
names = ['{0}.{1}'.format(func.__module__, func.__name__) for func in hooks[self.event]]
if '{0}.{1}'.format(f.__module__, f.__name__) not in names:
hooks[self.event].append(f)
return f
...
|
a9cebe11642b41a8c0b277e09bf273b52dbb63f9
|
apps/careeropportunity/views.py
|
apps/careeropportunity/views.py
|
from django.shortcuts import render
from django.utils import timezone
# API v1
from rest_framework import mixins, viewsets
from rest_framework.permissions import AllowAny
from apps.careeropportunity.models import CareerOpportunity
from apps.careeropportunity.serializers import CareerSerializer
def index(request, id=None):
return render(request, 'careeropportunity/index.html')
class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin):
"""
Viewset for Career serializer
"""
queryset = CareerOpportunity.objects.filter(
start__lte=timezone.now(),
end__gte=timezone.now()
).order_by('-featured', '-start')
serializer_class = CareerSerializer
permission_classes = (AllowAny,)
|
from django.shortcuts import render
from django.utils import timezone
# API v1
from rest_framework import mixins, viewsets
from rest_framework.permissions import AllowAny
from rest_framework.pagination import PageNumberPagination
from apps.careeropportunity.models import CareerOpportunity
from apps.careeropportunity.serializers import CareerSerializer
def index(request, id=None):
return render(request, 'careeropportunity/index.html')
class HundredItemsPaginator(PageNumberPagination):
page_size = 100
class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin):
"""
Viewset for Career serializer
"""
queryset = CareerOpportunity.objects.filter(
start__lte=timezone.now(),
end__gte=timezone.now()
).order_by('-featured', '-start')
serializer_class = CareerSerializer
permission_classes = (AllowAny,)
pagination_class = HundredItemsPaginator
|
Increase pagination size for careeropportunity api
|
Increase pagination size for careeropportunity api
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
python
|
## Code Before:
from django.shortcuts import render
from django.utils import timezone
# API v1
from rest_framework import mixins, viewsets
from rest_framework.permissions import AllowAny
from apps.careeropportunity.models import CareerOpportunity
from apps.careeropportunity.serializers import CareerSerializer
def index(request, id=None):
return render(request, 'careeropportunity/index.html')
class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin):
"""
Viewset for Career serializer
"""
queryset = CareerOpportunity.objects.filter(
start__lte=timezone.now(),
end__gte=timezone.now()
).order_by('-featured', '-start')
serializer_class = CareerSerializer
permission_classes = (AllowAny,)
## Instruction:
Increase pagination size for careeropportunity api
## Code After:
from django.shortcuts import render
from django.utils import timezone
# API v1
from rest_framework import mixins, viewsets
from rest_framework.permissions import AllowAny
from rest_framework.pagination import PageNumberPagination
from apps.careeropportunity.models import CareerOpportunity
from apps.careeropportunity.serializers import CareerSerializer
def index(request, id=None):
return render(request, 'careeropportunity/index.html')
class HundredItemsPaginator(PageNumberPagination):
page_size = 100
class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin):
"""
Viewset for Career serializer
"""
queryset = CareerOpportunity.objects.filter(
start__lte=timezone.now(),
end__gte=timezone.now()
).order_by('-featured', '-start')
serializer_class = CareerSerializer
permission_classes = (AllowAny,)
pagination_class = HundredItemsPaginator
|
# ... existing code ...
# API v1
from rest_framework import mixins, viewsets
from rest_framework.permissions import AllowAny
from rest_framework.pagination import PageNumberPagination
from apps.careeropportunity.models import CareerOpportunity
from apps.careeropportunity.serializers import CareerSerializer
# ... modified code ...
def index(request, id=None):
return render(request, 'careeropportunity/index.html')
class HundredItemsPaginator(PageNumberPagination):
page_size = 100
class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin):
...
).order_by('-featured', '-start')
serializer_class = CareerSerializer
permission_classes = (AllowAny,)
pagination_class = HundredItemsPaginator
# ... rest of the code ...
|
f891b7d3d09f152adf06bf8442d50f48f85650b8
|
Source/CAFMatchedTextViewController.h
|
Source/CAFMatchedTextViewController.h
|
//
// CAFMatchedTextViewController.h
// Curiosity
//
// Created by Matthew Thomas on 8/26/12.
// Copyright (c) 2012 Matthew Thomas. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface CAFMatchedTextViewController : UIViewController
@end
|
//
// CAFMatchedTextViewController.h
// Curiosity
//
// Created by Matthew Thomas on 8/26/12.
// Copyright (c) 2012 Matthew Thomas. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface CAFMatchedTextViewController : UIViewController
@property (copy, nonatomic) NSString *regexString;
@property (copy, nonatomic) NSString *inputText;
@end
|
Add regexString and inputText properties
|
Add regexString and inputText properties
|
C
|
mit
|
codecaffeine/Curiosity
|
c
|
## Code Before:
//
// CAFMatchedTextViewController.h
// Curiosity
//
// Created by Matthew Thomas on 8/26/12.
// Copyright (c) 2012 Matthew Thomas. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface CAFMatchedTextViewController : UIViewController
@end
## Instruction:
Add regexString and inputText properties
## Code After:
//
// CAFMatchedTextViewController.h
// Curiosity
//
// Created by Matthew Thomas on 8/26/12.
// Copyright (c) 2012 Matthew Thomas. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface CAFMatchedTextViewController : UIViewController
@property (copy, nonatomic) NSString *regexString;
@property (copy, nonatomic) NSString *inputText;
@end
|
// ... existing code ...
#import <UIKit/UIKit.h>
@interface CAFMatchedTextViewController : UIViewController
@property (copy, nonatomic) NSString *regexString;
@property (copy, nonatomic) NSString *inputText;
@end
// ... rest of the code ...
|
fa8cfbc631dfab0067b8c15bf6374579af071e7a
|
tests/test_main.py
|
tests/test_main.py
|
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
|
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
time.sleep(2)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
|
Add sleep during tests to prevent race
|
Add sleep during tests to prevent race
|
Python
|
bsd-3-clause
|
jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor
|
python
|
## Code Before:
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
## Instruction:
Add sleep during tests to prevent race
## Code After:
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
time.sleep(2)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
|
// ... existing code ...
import pathlib
import os
import os.path
import time
from unittest.mock import patch
// ... modified code ...
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
time.sleep(2)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
// ... rest of the code ...
|
e662f328eee81cab191389c498776feb69d1df91
|
zigbee-serial-javase/src/test/java/org/bubblecloud/zigbee/ZigBeeSerialNetworkTest.java
|
zigbee-serial-javase/src/test/java/org/bubblecloud/zigbee/ZigBeeSerialNetworkTest.java
|
package org.bubblecloud.zigbee;
import org.bubblecloud.zigbee.network.port.SerialPortImpl;
/**
* Created by Chris on 25/02/15.
*/
public final class ZigBeeSerialNetworkTest extends ZigBeeNetworkTest {
public static final String SerialPortIdentifier = "/dev/ttyACM0";
public static final int BaudRate = 115200;
/**
* Constructor for defining the serial port implementation.
*/
public ZigBeeSerialNetworkTest() {
super(new SerialPortImpl(SerialPortIdentifier, BaudRate));
}
}
|
package org.bubblecloud.zigbee;
import org.bubblecloud.zigbee.network.port.SerialPortImpl;
/**
* Created by Chris on 25/02/15.
*/
public final class ZigBeeSerialNetworkTest extends ZigBeeNetworkTest {
/**
* The serial port identifier.
*/
public static final String SERIAL_PORT_IDENTIFIER = "/dev/ttyACM0";
/**
* The baud rate.
*/
public static final int BAUD_RATE = 115200;
/**
* Constructor for defining the serial port implementation.
*/
public ZigBeeSerialNetworkTest() {
super(new SerialPortImpl(SERIAL_PORT_IDENTIFIER, BAUD_RATE));
}
}
|
Clean up of serial port implementation.
|
Clean up of serial port implementation.
|
Java
|
apache-2.0
|
tlaukkan/zigbee4java,cdjackson/zigbee4java,tlaukkan/zigbee4java,cdjackson/zigbee4java,tlaukkan/zigbee4java,cdjackson/zigbee4java
|
java
|
## Code Before:
package org.bubblecloud.zigbee;
import org.bubblecloud.zigbee.network.port.SerialPortImpl;
/**
* Created by Chris on 25/02/15.
*/
public final class ZigBeeSerialNetworkTest extends ZigBeeNetworkTest {
public static final String SerialPortIdentifier = "/dev/ttyACM0";
public static final int BaudRate = 115200;
/**
* Constructor for defining the serial port implementation.
*/
public ZigBeeSerialNetworkTest() {
super(new SerialPortImpl(SerialPortIdentifier, BaudRate));
}
}
## Instruction:
Clean up of serial port implementation.
## Code After:
package org.bubblecloud.zigbee;
import org.bubblecloud.zigbee.network.port.SerialPortImpl;
/**
* Created by Chris on 25/02/15.
*/
public final class ZigBeeSerialNetworkTest extends ZigBeeNetworkTest {
/**
* The serial port identifier.
*/
public static final String SERIAL_PORT_IDENTIFIER = "/dev/ttyACM0";
/**
* The baud rate.
*/
public static final int BAUD_RATE = 115200;
/**
* Constructor for defining the serial port implementation.
*/
public ZigBeeSerialNetworkTest() {
super(new SerialPortImpl(SERIAL_PORT_IDENTIFIER, BAUD_RATE));
}
}
|
# ... existing code ...
* Created by Chris on 25/02/15.
*/
public final class ZigBeeSerialNetworkTest extends ZigBeeNetworkTest {
/**
* The serial port identifier.
*/
public static final String SERIAL_PORT_IDENTIFIER = "/dev/ttyACM0";
/**
* The baud rate.
*/
public static final int BAUD_RATE = 115200;
/**
* Constructor for defining the serial port implementation.
*/
public ZigBeeSerialNetworkTest() {
super(new SerialPortImpl(SERIAL_PORT_IDENTIFIER, BAUD_RATE));
}
}
# ... rest of the code ...
|
8154b206160cde249c474f5905a60b9a8086c910
|
conftest.py
|
conftest.py
|
"""Configure pytest for metpy."""
import os
import matplotlib
import matplotlib.pyplot
import numpy
import pandas
import pytest
import scipy
import xarray
import metpy.calc
# Need to disable fallback before importing pint
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
import pint # noqa: I100, E402
def pytest_report_header(config, startdir):
"""Add dependency information to pytest output."""
return ('Dependencies: Matplotlib ({}), NumPy ({}), Pandas ({}), '
'Pint ({}), SciPy ({}), Xarray ({})'.format(matplotlib.__version__,
numpy.__version__, pandas.__version__,
pint.__version__, scipy.__version__,
xarray.__version__))
@pytest.fixture(autouse=True)
def doctest_available_modules(doctest_namespace):
"""Make modules available automatically to doctests."""
doctest_namespace['metpy'] = metpy
doctest_namespace['metpy.calc'] = metpy.calc
doctest_namespace['plt'] = matplotlib.pyplot
|
"""Configure pytest for metpy."""
import os
import matplotlib
import matplotlib.pyplot
import numpy
import pandas
import pooch
import pytest
import scipy
import traitlets
import xarray
import metpy.calc
# Need to disable fallback before importing pint
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
import pint # noqa: I100, E402
def pytest_report_header(config, startdir):
"""Add dependency information to pytest output."""
return (f'Dep Versions: Matplotlib {matplotlib.__version__}, '
f'NumPy {numpy.__version__}, SciPy {scipy.__version__}, '
f'Xarray {xarray.__version__}, Pint {pint.__version__}, '
f'Pandas {pandas.__version__}, Traitlets {traitlets.__version__}, '
f'Pooch {pooch.version.full_version}')
@pytest.fixture(autouse=True)
def doctest_available_modules(doctest_namespace):
"""Make modules available automatically to doctests."""
doctest_namespace['metpy'] = metpy
doctest_namespace['metpy.calc'] = metpy.calc
doctest_namespace['plt'] = matplotlib.pyplot
|
Print out all dependency versions at the start of pytest
|
TST: Print out all dependency versions at the start of pytest
|
Python
|
bsd-3-clause
|
Unidata/MetPy,dopplershift/MetPy,Unidata/MetPy,dopplershift/MetPy
|
python
|
## Code Before:
"""Configure pytest for metpy."""
import os
import matplotlib
import matplotlib.pyplot
import numpy
import pandas
import pytest
import scipy
import xarray
import metpy.calc
# Need to disable fallback before importing pint
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
import pint # noqa: I100, E402
def pytest_report_header(config, startdir):
"""Add dependency information to pytest output."""
return ('Dependencies: Matplotlib ({}), NumPy ({}), Pandas ({}), '
'Pint ({}), SciPy ({}), Xarray ({})'.format(matplotlib.__version__,
numpy.__version__, pandas.__version__,
pint.__version__, scipy.__version__,
xarray.__version__))
@pytest.fixture(autouse=True)
def doctest_available_modules(doctest_namespace):
"""Make modules available automatically to doctests."""
doctest_namespace['metpy'] = metpy
doctest_namespace['metpy.calc'] = metpy.calc
doctest_namespace['plt'] = matplotlib.pyplot
## Instruction:
TST: Print out all dependency versions at the start of pytest
## Code After:
"""Configure pytest for metpy."""
import os
import matplotlib
import matplotlib.pyplot
import numpy
import pandas
import pooch
import pytest
import scipy
import traitlets
import xarray
import metpy.calc
# Need to disable fallback before importing pint
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
import pint # noqa: I100, E402
def pytest_report_header(config, startdir):
"""Add dependency information to pytest output."""
return (f'Dep Versions: Matplotlib {matplotlib.__version__}, '
f'NumPy {numpy.__version__}, SciPy {scipy.__version__}, '
f'Xarray {xarray.__version__}, Pint {pint.__version__}, '
f'Pandas {pandas.__version__}, Traitlets {traitlets.__version__}, '
f'Pooch {pooch.version.full_version}')
@pytest.fixture(autouse=True)
def doctest_available_modules(doctest_namespace):
"""Make modules available automatically to doctests."""
doctest_namespace['metpy'] = metpy
doctest_namespace['metpy.calc'] = metpy.calc
doctest_namespace['plt'] = matplotlib.pyplot
|
// ... existing code ...
import matplotlib.pyplot
import numpy
import pandas
import pooch
import pytest
import scipy
import traitlets
import xarray
import metpy.calc
// ... modified code ...
def pytest_report_header(config, startdir):
"""Add dependency information to pytest output."""
return (f'Dep Versions: Matplotlib {matplotlib.__version__}, '
f'NumPy {numpy.__version__}, SciPy {scipy.__version__}, '
f'Xarray {xarray.__version__}, Pint {pint.__version__}, '
f'Pandas {pandas.__version__}, Traitlets {traitlets.__version__}, '
f'Pooch {pooch.version.full_version}')
@pytest.fixture(autouse=True)
// ... rest of the code ...
|
899e3c9f81a43dcb94e290ce0a86f128bd94effd
|
opps/channel/context_processors.py
|
opps/channel/context_processors.py
|
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
|
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
|
Apply filter channel published on menu list (channel context processors)
|
Apply filter channel published on menu list (channel context processors)
|
Python
|
mit
|
YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps
|
python
|
## Code Before:
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
## Instruction:
Apply filter channel published on menu list (channel context processors)
## Code After:
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
|
# ... existing code ...
from django.utils import timezone
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
published=True)
return {'opps_menu': opps_menu}
# ... rest of the code ...
|
399430076227f42f5d168c5b2264933c32f4b52a
|
lib/ansible/release.py
|
lib/ansible/release.py
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
Update ansible version number to 2.8.0.dev0
|
Update ansible version number to 2.8.0.dev0
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
python
|
## Code Before:
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
## Instruction:
Update ansible version number to 2.8.0.dev0
## Code After:
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
// ... existing code ...
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
// ... rest of the code ...
|
1c6b06f240d4388b3e140e3d9ab610711616f539
|
src/python/expedient/clearinghouse/resources/models.py
|
src/python/expedient/clearinghouse/resources/models.py
|
'''
@author: jnaous
'''
from django.db import models
from expedient.clearinghouse.aggregate.models import Aggregate
from expedient.common.extendable.models import Extendable
from expedient.clearinghouse.slice.models import Slice
class Resource(Extendable):
'''
Generic model of a resource.
@param aggregate: The L{Aggregate} that controls/owns this resource
@type aggregate: L{models.ForeignKey} to L{Aggregate}
@param name: A human-readable name for the resource
@type name: L{str}
'''
name = models.CharField(max_length=200)
available = models.BooleanField("Available", default=True, editable=False)
status_change_timestamp = models.DateTimeField(editable=False)
aggregate = models.ForeignKey(
Aggregate, verbose_name="Aggregate the resource belongs to")
slice_set = models.ManyToManyField(
Slice, through="Sliver", verbose_name="Slices this resource is used in")
def __unicode__(self):
return u"Resource: %s belonging to aggregate %s." % (
self.name, self.aggregate)
class Sliver(Extendable):
'''
Information on the reservation of a particular resource for a slice.
'''
resource = models.ForeignKey(
Resource, verbose_name="Resource this sliver is part of")
slice = models.ForeignKey(
Slice, verbose_name="Slice this sliver is part of")
|
'''
@author: jnaous
'''
from django.db import models
from expedient.clearinghouse.aggregate.models import Aggregate
from expedient.common.extendable.models import Extendable
from expedient.clearinghouse.slice.models import Slice
from datetime import datetime
class Resource(Extendable):
'''
Generic model of a resource.
@param aggregate: The L{Aggregate} that controls/owns this resource
@type aggregate: L{models.ForeignKey} to L{Aggregate}
@param name: A human-readable name for the resource
@type name: L{str}
'''
name = models.CharField(max_length=200)
available = models.BooleanField("Available", default=True, editable=False)
status_change_timestamp = models.DateTimeField(
editable=False, auto_now_add=True)
aggregate = models.ForeignKey(
Aggregate, verbose_name="Aggregate the resource belongs to")
slice_set = models.ManyToManyField(
Slice, through="Sliver", verbose_name="Slices this resource is used in")
def update_timestamp(self):
self.status_change_timestamp = datetime.now()
def __unicode__(self):
return u"Resource: %s belonging to aggregate %s." % (
self.name, self.aggregate)
class Sliver(Extendable):
'''
Information on the reservation of a particular resource for a slice.
'''
resource = models.ForeignKey(
Resource, verbose_name="Resource this sliver is part of")
slice = models.ForeignKey(
Slice, verbose_name="Slice this sliver is part of")
|
Add functions to manage status change timestamp better
|
Add functions to manage status change timestamp better
|
Python
|
bsd-3-clause
|
avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf
|
python
|
## Code Before:
'''
@author: jnaous
'''
from django.db import models
from expedient.clearinghouse.aggregate.models import Aggregate
from expedient.common.extendable.models import Extendable
from expedient.clearinghouse.slice.models import Slice
class Resource(Extendable):
'''
Generic model of a resource.
@param aggregate: The L{Aggregate} that controls/owns this resource
@type aggregate: L{models.ForeignKey} to L{Aggregate}
@param name: A human-readable name for the resource
@type name: L{str}
'''
name = models.CharField(max_length=200)
available = models.BooleanField("Available", default=True, editable=False)
status_change_timestamp = models.DateTimeField(editable=False)
aggregate = models.ForeignKey(
Aggregate, verbose_name="Aggregate the resource belongs to")
slice_set = models.ManyToManyField(
Slice, through="Sliver", verbose_name="Slices this resource is used in")
def __unicode__(self):
return u"Resource: %s belonging to aggregate %s." % (
self.name, self.aggregate)
class Sliver(Extendable):
'''
Information on the reservation of a particular resource for a slice.
'''
resource = models.ForeignKey(
Resource, verbose_name="Resource this sliver is part of")
slice = models.ForeignKey(
Slice, verbose_name="Slice this sliver is part of")
## Instruction:
Add functions to manage status change timestamp better
## Code After:
'''
@author: jnaous
'''
from django.db import models
from expedient.clearinghouse.aggregate.models import Aggregate
from expedient.common.extendable.models import Extendable
from expedient.clearinghouse.slice.models import Slice
from datetime import datetime
class Resource(Extendable):
'''
Generic model of a resource.
@param aggregate: The L{Aggregate} that controls/owns this resource
@type aggregate: L{models.ForeignKey} to L{Aggregate}
@param name: A human-readable name for the resource
@type name: L{str}
'''
name = models.CharField(max_length=200)
available = models.BooleanField("Available", default=True, editable=False)
status_change_timestamp = models.DateTimeField(
editable=False, auto_now_add=True)
aggregate = models.ForeignKey(
Aggregate, verbose_name="Aggregate the resource belongs to")
slice_set = models.ManyToManyField(
Slice, through="Sliver", verbose_name="Slices this resource is used in")
def update_timestamp(self):
self.status_change_timestamp = datetime.now()
def __unicode__(self):
return u"Resource: %s belonging to aggregate %s." % (
self.name, self.aggregate)
class Sliver(Extendable):
'''
Information on the reservation of a particular resource for a slice.
'''
resource = models.ForeignKey(
Resource, verbose_name="Resource this sliver is part of")
slice = models.ForeignKey(
Slice, verbose_name="Slice this sliver is part of")
|
...
from expedient.clearinghouse.aggregate.models import Aggregate
from expedient.common.extendable.models import Extendable
from expedient.clearinghouse.slice.models import Slice
from datetime import datetime
class Resource(Extendable):
'''
...
name = models.CharField(max_length=200)
available = models.BooleanField("Available", default=True, editable=False)
status_change_timestamp = models.DateTimeField(
editable=False, auto_now_add=True)
aggregate = models.ForeignKey(
Aggregate, verbose_name="Aggregate the resource belongs to")
slice_set = models.ManyToManyField(
Slice, through="Sliver", verbose_name="Slices this resource is used in")
def update_timestamp(self):
self.status_change_timestamp = datetime.now()
def __unicode__(self):
return u"Resource: %s belonging to aggregate %s." % (
self.name, self.aggregate)
class Sliver(Extendable):
'''
Information on the reservation of a particular resource for a slice.
...
|
d257a1c2e1d577e7902d47696b9f98ece299e7dd
|
src/dnscap_common.h
|
src/dnscap_common.h
|
/*
* setup MY_BPFTIMEVAL as the timeval structure that bpf packets
* will be assoicated with packets from libpcap
*/
#ifdef __OpenBSD__
# define MY_BPFTIMEVAL bpf_timeval
#endif
#ifndef MY_BPFTIMEVAL
# define MY_BPFTIMEVAL timeval
#endif
typedef struct MY_BPFTIMEVAL my_bpftimeval;
/*
* Structure to contain IP addresses
*/
typedef struct {
int af;
union {
struct in_addr a4;
struct in6_addr a6;
} u;
} iaddr;
/*
* plugins can call the logerr() function in the main dnscap
* process.
*/
typedef int logerr_t(const char *fmt, ...);
/*
* Prototype for the plugin "output" function
*/
typedef void output_t(const char *descr,
iaddr from,
iaddr to,
uint8_t proto,
int isfrag,
unsigned sport,
unsigned dport,
my_bpftimeval ts,
const u_char *pkt_copy,
unsigned olen,
const u_char *dnspkt,
unsigned dnslen);
#define DIR_INITIATE 0x0001
#define DIR_RESPONSE 0x0002
|
/*
* setup MY_BPFTIMEVAL as the timeval structure that bpf packets
* will be assoicated with packets from libpcap
*/
#ifndef MY_BPFTIMEVAL
# define MY_BPFTIMEVAL timeval
#endif
typedef struct MY_BPFTIMEVAL my_bpftimeval;
/*
* Structure to contain IP addresses
*/
typedef struct {
int af;
union {
struct in_addr a4;
struct in6_addr a6;
} u;
} iaddr;
/*
* plugins can call the logerr() function in the main dnscap
* process.
*/
typedef int logerr_t(const char *fmt, ...);
/*
* Prototype for the plugin "output" function
*/
typedef void output_t(const char *descr,
iaddr from,
iaddr to,
uint8_t proto,
int isfrag,
unsigned sport,
unsigned dport,
my_bpftimeval ts,
const u_char *pkt_copy,
unsigned olen,
const u_char *dnspkt,
unsigned dnslen);
#define DIR_INITIATE 0x0001
#define DIR_RESPONSE 0x0002
|
Fix compilation on FreeBSD and OpenBSD
|
Fix compilation on FreeBSD and OpenBSD
|
C
|
isc
|
verisign/dnscap,verisign/dnscap
|
c
|
## Code Before:
/*
* setup MY_BPFTIMEVAL as the timeval structure that bpf packets
* will be assoicated with packets from libpcap
*/
#ifdef __OpenBSD__
# define MY_BPFTIMEVAL bpf_timeval
#endif
#ifndef MY_BPFTIMEVAL
# define MY_BPFTIMEVAL timeval
#endif
typedef struct MY_BPFTIMEVAL my_bpftimeval;
/*
* Structure to contain IP addresses
*/
typedef struct {
int af;
union {
struct in_addr a4;
struct in6_addr a6;
} u;
} iaddr;
/*
* plugins can call the logerr() function in the main dnscap
* process.
*/
typedef int logerr_t(const char *fmt, ...);
/*
* Prototype for the plugin "output" function
*/
typedef void output_t(const char *descr,
iaddr from,
iaddr to,
uint8_t proto,
int isfrag,
unsigned sport,
unsigned dport,
my_bpftimeval ts,
const u_char *pkt_copy,
unsigned olen,
const u_char *dnspkt,
unsigned dnslen);
#define DIR_INITIATE 0x0001
#define DIR_RESPONSE 0x0002
## Instruction:
Fix compilation on FreeBSD and OpenBSD
## Code After:
/*
* setup MY_BPFTIMEVAL as the timeval structure that bpf packets
* will be assoicated with packets from libpcap
*/
#ifndef MY_BPFTIMEVAL
# define MY_BPFTIMEVAL timeval
#endif
typedef struct MY_BPFTIMEVAL my_bpftimeval;
/*
* Structure to contain IP addresses
*/
typedef struct {
int af;
union {
struct in_addr a4;
struct in6_addr a6;
} u;
} iaddr;
/*
* plugins can call the logerr() function in the main dnscap
* process.
*/
typedef int logerr_t(const char *fmt, ...);
/*
* Prototype for the plugin "output" function
*/
typedef void output_t(const char *descr,
iaddr from,
iaddr to,
uint8_t proto,
int isfrag,
unsigned sport,
unsigned dport,
my_bpftimeval ts,
const u_char *pkt_copy,
unsigned olen,
const u_char *dnspkt,
unsigned dnslen);
#define DIR_INITIATE 0x0001
#define DIR_RESPONSE 0x0002
|
// ... existing code ...
/*
* setup MY_BPFTIMEVAL as the timeval structure that bpf packets
* will be assoicated with packets from libpcap
*/
#ifndef MY_BPFTIMEVAL
# define MY_BPFTIMEVAL timeval
#endif
// ... rest of the code ...
|
87b1d823f09a20547b08f769636bfc7bcc7f0766
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='rhino',
version='1.0',
packages=['rhino'],
include_package_data=True,
#requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup(<3.1)', 'jinja2'],
#install_requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup<3.1', 'jinja2'],
requires=['Django', 'south', 'jinja2', 'oauth2'],
)
|
from setuptools import setup
setup(
name='rhino',
version='1.0',
packages=['rhino'],
include_package_data=True,
#requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup(<3.1)', 'jinja2'],
#install_requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup<3.1', 'jinja2'],
requires=['Django', 'south', 'jinja2', 'oauth2', 'pytidylib', 'remoteobjects'],
)
|
Call out these requirements too
|
Call out these requirements too
|
Python
|
mit
|
markpasc/leapfrog,markpasc/leapfrog
|
python
|
## Code Before:
from setuptools import setup
setup(
name='rhino',
version='1.0',
packages=['rhino'],
include_package_data=True,
#requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup(<3.1)', 'jinja2'],
#install_requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup<3.1', 'jinja2'],
requires=['Django', 'south', 'jinja2', 'oauth2'],
)
## Instruction:
Call out these requirements too
## Code After:
from setuptools import setup
setup(
name='rhino',
version='1.0',
packages=['rhino'],
include_package_data=True,
#requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup(<3.1)', 'jinja2'],
#install_requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup<3.1', 'jinja2'],
requires=['Django', 'south', 'jinja2', 'oauth2', 'pytidylib', 'remoteobjects'],
)
|
# ... existing code ...
#requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup(<3.1)', 'jinja2'],
#install_requires=['Django', 'south', 'celery', 'django_celery', 'httplib2', 'passogva', 'BeautifulSoup<3.1', 'jinja2'],
requires=['Django', 'south', 'jinja2', 'oauth2', 'pytidylib', 'remoteobjects'],
)
# ... rest of the code ...
|
166c1a4dde981d5bd7d20a00c8329d7bbb4a3c00
|
nipype/interfaces/setup.py
|
nipype/interfaces/setup.py
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Remove reference to non-existing data directory.
|
Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
arokem/nipype,gerddie/nipype,iglpdc/nipype,pearsonlab/nipype,carlohamalainen/nipype,Leoniela/nipype,glatard/nipype,dgellis90/nipype,satra/NiPypeold,glatard/nipype,mick-d/nipype,carolFrohlich/nipype,blakedewey/nipype,dgellis90/nipype,pearsonlab/nipype,mick-d/nipype_source,FCP-INDI/nipype,dgellis90/nipype,carolFrohlich/nipype,FredLoney/nipype,arokem/nipype,rameshvs/nipype,arokem/nipype,gerddie/nipype,carolFrohlich/nipype,gerddie/nipype,rameshvs/nipype,mick-d/nipype,sgiavasis/nipype,carlohamalainen/nipype,dmordom/nipype,wanderine/nipype,fprados/nipype,grlee77/nipype,carlohamalainen/nipype,blakedewey/nipype,mick-d/nipype_source,Leoniela/nipype,FredLoney/nipype,dgellis90/nipype,sgiavasis/nipype,rameshvs/nipype,blakedewey/nipype,dmordom/nipype,satra/NiPypeold,dmordom/nipype,sgiavasis/nipype,christianbrodbeck/nipype,FredLoney/nipype,grlee77/nipype,fprados/nipype,iglpdc/nipype,iglpdc/nipype,JohnGriffiths/nipype,sgiavasis/nipype,gerddie/nipype,grlee77/nipype,christianbrodbeck/nipype,wanderine/nipype,fprados/nipype,FCP-INDI/nipype,arokem/nipype,JohnGriffiths/nipype,wanderine/nipype,carolFrohlich/nipype,FCP-INDI/nipype,pearsonlab/nipype,glatard/nipype,JohnGriffiths/nipype,blakedewey/nipype,grlee77/nipype,rameshvs/nipype,glatard/nipype,Leoniela/nipype,pearsonlab/nipype,mick-d/nipype_source,FCP-INDI/nipype,mick-d/nipype,iglpdc/nipype,wanderine/nipype,JohnGriffiths/nipype,mick-d/nipype
|
python
|
## Code Before:
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
## Instruction:
Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00
## Code After:
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
...
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
...
|
a91a04af6b95fa600a0b3ce74b5fffc07ecf590e
|
polymorphic/__init__.py
|
polymorphic/__init__.py
|
# See PEP 440 (https://www.python.org/dev/peps/pep-0440/)
__version__ = "1.3"
|
import pkg_resources
__version__ = pkg_resources.require("django-polymorphic")[0].version
|
Set polymorphic.__version__ from setuptools metadata
|
Set polymorphic.__version__ from setuptools metadata
|
Python
|
bsd-3-clause
|
skirsdeda/django_polymorphic,skirsdeda/django_polymorphic,skirsdeda/django_polymorphic,chrisglass/django_polymorphic,chrisglass/django_polymorphic
|
python
|
## Code Before:
# See PEP 440 (https://www.python.org/dev/peps/pep-0440/)
__version__ = "1.3"
## Instruction:
Set polymorphic.__version__ from setuptools metadata
## Code After:
import pkg_resources
__version__ = pkg_resources.require("django-polymorphic")[0].version
|
# ... existing code ...
import pkg_resources
__version__ = pkg_resources.require("django-polymorphic")[0].version
# ... rest of the code ...
|
fdd69cb0b7b11fce9cfc70d85e51a29aaabc0ee0
|
wagtailmenus/management/commands/autopopulate_main_menus.py
|
wagtailmenus/management/commands/autopopulate_main_menus.py
|
import logging
from django.core.management.base import BaseCommand
from wagtail.wagtailcore.models import Site
from wagtailmenus import app_settings
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = (
"Create a 'main menu' for any 'Site' that doesn't already have one. "
"If main menus for any site do not have menu items, identify the "
"'home' and 'section root' pages for the site, and menu items linking "
"to those to the menu. Assumes 'site.root_page' is the 'home page' "
"and its children are the 'section root' pages")
def add_arguments(self, parser):
parser.add_argument(
'--add-home-links',
action='store_true',
dest='add-home-links',
default=True,
help="Add menu items for 'home' pages",
)
def handle(self, *args, **options):
for site in Site.objects.all():
menu = app_settings.MAIN_MENU_MODEL_CLASS.get_for_site(site)
if not menu.get_menu_items_manager().exists():
menu.add_menu_items_for_pages(
site.root_page.get_descendants(
inclusive=options['add-home-links']
).filter(depth__lte=3)
)
|
import logging
from django.core.management.base import BaseCommand
from wagtail.wagtailcore.models import Site
from wagtailmenus import app_settings
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = (
"Create a 'main menu' for any 'Site' that doesn't already have one. "
"If main menus for any site do not have menu items, identify the "
"'home' and 'section root' pages for the site, and menu items linking "
"to those to the menu. Assumes 'site.root_page' is the 'home page' "
"and its children are the 'section root' pages")
def add_arguments(self, parser):
parser.add_argument(
'--add-home-links',
action='store_true',
dest='add-home-links',
default=True,
help="Add menu items for 'home' pages",
)
def handle(self, *args, **options):
for site in Site.objects.all():
menu = app_settings.MAIN_MENU_MODEL_CLASS.get_for_site(site)
if not menu.get_menu_items_manager().exists():
menu.add_menu_items_for_pages(
site.root_page.get_descendants(
inclusive=options['add-home-links']
).filter(depth__lte=site.root_page.depth + 1)
)
|
Use the root_page.depth to determine filter value to identify section root pages
|
Use the root_page.depth to determine filter value to identify section root pages
|
Python
|
mit
|
rkhleics/wagtailmenus,ababic/wagtailmenus,rkhleics/wagtailmenus,ababic/wagtailmenus,rkhleics/wagtailmenus,ababic/wagtailmenus
|
python
|
## Code Before:
import logging
from django.core.management.base import BaseCommand
from wagtail.wagtailcore.models import Site
from wagtailmenus import app_settings
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = (
"Create a 'main menu' for any 'Site' that doesn't already have one. "
"If main menus for any site do not have menu items, identify the "
"'home' and 'section root' pages for the site, and menu items linking "
"to those to the menu. Assumes 'site.root_page' is the 'home page' "
"and its children are the 'section root' pages")
def add_arguments(self, parser):
parser.add_argument(
'--add-home-links',
action='store_true',
dest='add-home-links',
default=True,
help="Add menu items for 'home' pages",
)
def handle(self, *args, **options):
for site in Site.objects.all():
menu = app_settings.MAIN_MENU_MODEL_CLASS.get_for_site(site)
if not menu.get_menu_items_manager().exists():
menu.add_menu_items_for_pages(
site.root_page.get_descendants(
inclusive=options['add-home-links']
).filter(depth__lte=3)
)
## Instruction:
Use the root_page.depth to determine filter value to identify section root pages
## Code After:
import logging
from django.core.management.base import BaseCommand
from wagtail.wagtailcore.models import Site
from wagtailmenus import app_settings
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = (
"Create a 'main menu' for any 'Site' that doesn't already have one. "
"If main menus for any site do not have menu items, identify the "
"'home' and 'section root' pages for the site, and menu items linking "
"to those to the menu. Assumes 'site.root_page' is the 'home page' "
"and its children are the 'section root' pages")
def add_arguments(self, parser):
parser.add_argument(
'--add-home-links',
action='store_true',
dest='add-home-links',
default=True,
help="Add menu items for 'home' pages",
)
def handle(self, *args, **options):
for site in Site.objects.all():
menu = app_settings.MAIN_MENU_MODEL_CLASS.get_for_site(site)
if not menu.get_menu_items_manager().exists():
menu.add_menu_items_for_pages(
site.root_page.get_descendants(
inclusive=options['add-home-links']
).filter(depth__lte=site.root_page.depth + 1)
)
|
...
menu.add_menu_items_for_pages(
site.root_page.get_descendants(
inclusive=options['add-home-links']
).filter(depth__lte=site.root_page.depth + 1)
)
...
|
84dee56df90d9181d1e79c3246ef389462f0ca17
|
configure_console_session.py
|
configure_console_session.py
|
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
|
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
|
Put imports as they are in init
|
Put imports as they are in init
|
Python
|
bsd-3-clause
|
nickpowersys/CaaR
|
python
|
## Code Before:
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from comfort import cleanthermostat as ct
from comfort import history as hi
from comfort import histdaily as hd
from comfort import histsummary as hs
## Instruction:
Put imports as they are in init
## Code After:
import sys
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/caar'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports'
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
|
// ... existing code ...
sys.path.append(PYTHONPATH)
PYTHONPATH = '/home/nick/PycharmProjs/tl_cycling/backports/configparser'
sys.path.append(PYTHONPATH)
from caar.cleanthermostat import dict_from_file
from caar.cleanthermostat import detect_columns
from caar.cleanthermostat import pickle_from_file
from caar.history import create_cycles_df
from caar.history import create_inside_df
from caar.history import create_outside_df
from caar.history import random_record
from caar.histsummary import days_of_data_by_id
from caar.histsummary import consecutive_days_of_observations
from caar.histsummary import daily_cycle_and_temp_obs_counts
from caar.histsummary import daily_data_points_by_id
from caar.histsummary import df_select_ids
from caar.histsummary import df_select_datetime_range
from caar.histsummary import count_of_data_points_for_each_id
from caar.histsummary import count_of_data_points_for_select_id
from caar.histsummary import location_id_of_thermo
from caar.timeseries import time_series_cycling_and_temps
from caar.timeseries import on_off_status
from caar.timeseries import temps_arr_by_freq
from caar.timeseries import plot_cycles_xy
from caar.timeseries import plot_temps_xy
// ... rest of the code ...
|
172cc253bde8c9b0e827481ea0219ce51aed0c7c
|
src/main/java/com/jvm_bloggers/frontend/common_components/toastr/ToastrBehavior.java
|
src/main/java/com/jvm_bloggers/frontend/common_components/toastr/ToastrBehavior.java
|
package com.jvm_bloggers.frontend.common_components.toastr;
//import de.agilecoders.wicket.webjars.request.resource.WebjarsCssResourceReference;
//import de.agilecoders.wicket.webjars.request.resource.WebjarsJavaScriptResourceReference;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
public class ToastrBehavior extends Behavior {
private static final String TOASTR_CSS = "toastr/current/build/toastr.min.css";
private static final String TOASTR_JS = "toastr/current/build/toastr.min.js";
@Override
public void renderHead(Component component, IHeaderResponse response) {
super.renderHead(component, response);
// response.render(CssHeaderItem.forReference(
// new WebjarsCssResourceReference(TOASTR_CSS)));
// response.render(JavaScriptHeaderItem.forReference(
// new WebjarsJavaScriptResourceReference(TOASTR_JS)));
}
}
|
package com.jvm_bloggers.frontend.common_components.toastr;
import de.agilecoders.wicket.webjars.request.resource.WebjarsCssResourceReference;
import de.agilecoders.wicket.webjars.request.resource.WebjarsJavaScriptResourceReference;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
public class ToastrBehavior extends Behavior {
private static final String TOASTR_CSS = "toastr/current/build/toastr.min.css";
private static final String TOASTR_JS = "toastr/current/build/toastr.min.js";
@Override
public void renderHead(Component component, IHeaderResponse response) {
super.renderHead(component, response);
response.render(CssHeaderItem.forReference(
new WebjarsCssResourceReference(TOASTR_CSS)));
response.render(JavaScriptHeaderItem.forReference(
new WebjarsJavaScriptResourceReference(TOASTR_JS)));
}
}
|
Fix not working toastr popups in Varia page
|
Fix not working toastr popups in Varia page
|
Java
|
mit
|
tdziurko/jvm-bloggers,tdziurko/jvm-bloggers,jvm-bloggers/jvm-bloggers,szpak/jvm-bloggers,szpak/jvm-bloggers,szpak/jvm-bloggers,szpak/jvm-bloggers,kraluk/jvm-bloggers,jvm-bloggers/jvm-bloggers,tdziurko/jvm-bloggers,kraluk/jvm-bloggers,jvm-bloggers/jvm-bloggers,kraluk/jvm-bloggers,jvm-bloggers/jvm-bloggers,tdziurko/jvm-bloggers,kraluk/jvm-bloggers
|
java
|
## Code Before:
package com.jvm_bloggers.frontend.common_components.toastr;
//import de.agilecoders.wicket.webjars.request.resource.WebjarsCssResourceReference;
//import de.agilecoders.wicket.webjars.request.resource.WebjarsJavaScriptResourceReference;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
public class ToastrBehavior extends Behavior {
private static final String TOASTR_CSS = "toastr/current/build/toastr.min.css";
private static final String TOASTR_JS = "toastr/current/build/toastr.min.js";
@Override
public void renderHead(Component component, IHeaderResponse response) {
super.renderHead(component, response);
// response.render(CssHeaderItem.forReference(
// new WebjarsCssResourceReference(TOASTR_CSS)));
// response.render(JavaScriptHeaderItem.forReference(
// new WebjarsJavaScriptResourceReference(TOASTR_JS)));
}
}
## Instruction:
Fix not working toastr popups in Varia page
## Code After:
package com.jvm_bloggers.frontend.common_components.toastr;
import de.agilecoders.wicket.webjars.request.resource.WebjarsCssResourceReference;
import de.agilecoders.wicket.webjars.request.resource.WebjarsJavaScriptResourceReference;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
public class ToastrBehavior extends Behavior {
private static final String TOASTR_CSS = "toastr/current/build/toastr.min.css";
private static final String TOASTR_JS = "toastr/current/build/toastr.min.js";
@Override
public void renderHead(Component component, IHeaderResponse response) {
super.renderHead(component, response);
response.render(CssHeaderItem.forReference(
new WebjarsCssResourceReference(TOASTR_CSS)));
response.render(JavaScriptHeaderItem.forReference(
new WebjarsJavaScriptResourceReference(TOASTR_JS)));
}
}
|
# ... existing code ...
package com.jvm_bloggers.frontend.common_components.toastr;
import de.agilecoders.wicket.webjars.request.resource.WebjarsCssResourceReference;
import de.agilecoders.wicket.webjars.request.resource.WebjarsJavaScriptResourceReference;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.head.CssHeaderItem;
# ... modified code ...
public class ToastrBehavior extends Behavior {
private static final String TOASTR_CSS = "toastr/current/build/toastr.min.css";
private static final String TOASTR_JS = "toastr/current/build/toastr.min.js";
@Override
public void renderHead(Component component, IHeaderResponse response) {
super.renderHead(component, response);
response.render(CssHeaderItem.forReference(
new WebjarsCssResourceReference(TOASTR_CSS)));
response.render(JavaScriptHeaderItem.forReference(
new WebjarsJavaScriptResourceReference(TOASTR_JS)));
}
}
# ... rest of the code ...
|
c07bacb73eec4b963ec53c067f23385dad246fb6
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
|
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Add classifiers to zencoder-py package
|
Add classifiers to zencoder-py package
|
Python
|
mit
|
zencoder/zencoder-py
|
python
|
## Code Before:
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder']
)
## Instruction:
Add classifiers to zencoder-py package
## Code After:
from distutils.core import setup
setup(name='zencoder',
version='0.5.2',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='[email protected]',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
// ... existing code ...
url='http://github.com/schworer/zencoder-py',
license="MIT License",
install_requires=['httplib2'],
packages=['zencoder'],
platforms='any',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
// ... rest of the code ...
|
b0efd28627c7789885f5bbd215622635ea8b5a63
|
src/main/java/nl/wjl/template/springbootjpa/domain/Person.java
|
src/main/java/nl/wjl/template/springbootjpa/domain/Person.java
|
package nl.wjl.template.springbootjpa.domain;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
* Created by Wouter on 18-2-2016.
*/
@Entity
public class Person {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Integer id;
private String firstName;
private String lastName;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
}
|
package nl.wjl.template.springbootjpa.domain;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
* Created by Wouter on 18-2-2016.
*/
@Entity
public class Person {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
private String firstName;
private String lastName;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
}
|
Change id from Integer to Long
|
Change id from Integer to Long
|
Java
|
apache-2.0
|
WouterLangerak/spring-boot-data-jpa-example
|
java
|
## Code Before:
package nl.wjl.template.springbootjpa.domain;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
* Created by Wouter on 18-2-2016.
*/
@Entity
public class Person {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Integer id;
private String firstName;
private String lastName;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
}
## Instruction:
Change id from Integer to Long
## Code After:
package nl.wjl.template.springbootjpa.domain;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
* Created by Wouter on 18-2-2016.
*/
@Entity
public class Person {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
private String firstName;
private String lastName;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
}
|
// ... existing code ...
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
private Long id;
private String firstName;
private String lastName;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
// ... rest of the code ...
|
3919d64370825d8931672011af4b99355e52ef63
|
motobot/core_plugins/help.py
|
motobot/core_plugins/help.py
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
|
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
Update to new response handling
|
Update to new response handling
|
Python
|
mit
|
Motoko11/MotoBot
|
python
|
## Code Before:
from motobot import IRCBot, command, Notice
def get_command_help(bot, command, modifier):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append((' '.join(func.__doc__.split()), modifier))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
modifier = Notice(nick)
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
response = (response, modifier)
else:
response = get_command_help(bot, args[1], modifier)
if response == []:
response = ("There is no help entry for the command: {}.".format(args[1]), modifier)
return response
## Instruction:
Update to new response handling
## Code After:
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
for plugin in filter(func, bot.plugins):
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
@command('help')
def help_command(bot, database, nick, channel, message, args):
""" Print help messages for the user.
Takes a single argument for a command name.
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
|
...
from motobot import IRCBot, command, Notice
def get_command_help(bot, command):
responses = []
func = lambda x: x.type == IRCBot.command_plugin and x.arg.lower() == command.lower()
...
func = plugin.func
if func.__doc__ is not None:
responses.append(' '.join(func.__doc__.split()))
return responses
...
No arguments gives a generic help message.
"""
response = None
if len(args) <= 1:
default_help = "For help on a specific command use '!help command'."
response = bot.default_help \
if bot.default_help is not None else default_help
else:
response = get_command_help(bot, args[1])
if response == []:
response = "There is no help entry for the command: {}.".format(args[1])
return response, Notice(nick)
...
|
240a18f3955c0c2749c867b32c231b19672d83e0
|
src/problem.h
|
src/problem.h
|
class Problem {
public:
int objcnt; // Number of objectives
double* rhs;
int** objind; // Objective indices
double** objcoef; // Objective coefficients
Sense objsen; // Objective sense. Note that all objectives must have the same
// sense (i.e., either all objectives are to be minimised, or
// all objectives are to be maximised).
int* conind;
char* consense;
~Problem();
};
inline Problem::~Problem() {
for(int j = 0; j < objcnt; ++j) {
delete[] objind[j];
delete[] objcoef[j];
}
delete[] objind;
delete[] objcoef;
delete[] rhs;
delete[] conind;
delete[] consense;
}
#endif /* PROBLEM_H */
|
class Problem {
public:
int objcnt; // Number of objectives
double* rhs;
int** objind; // Objective indices
double** objcoef; // Objective coefficients
Sense objsen; // Objective sense. Note that all objectives must have the same
// sense (i.e., either all objectives are to be minimised, or
// all objectives are to be maximised).
int* conind;
char* consense;
Problem();
~Problem();
};
inline Problem::Problem() : objcnt(0) { }
inline Problem::~Problem() {
// If objcnt == 0, then no problem has been assigned and no memory allocated
if (objcnt == 0)
return;
for(int j = 0; j < objcnt; ++j) {
delete[] objind[j];
delete[] objcoef[j];
}
delete[] objind;
delete[] objcoef;
delete[] rhs;
delete[] conind;
delete[] consense;
}
#endif /* PROBLEM_H */
|
Fix invalid delete[] calls in ~Problem
|
Fix invalid delete[] calls in ~Problem
If objcnt is 0, then no memory has been allocated.
|
C
|
bsd-2-clause
|
WPettersson/moip_aira,WPettersson/moip_aira,WPettersson/moip_aira,WPettersson/moip_aira
|
c
|
## Code Before:
class Problem {
public:
int objcnt; // Number of objectives
double* rhs;
int** objind; // Objective indices
double** objcoef; // Objective coefficients
Sense objsen; // Objective sense. Note that all objectives must have the same
// sense (i.e., either all objectives are to be minimised, or
// all objectives are to be maximised).
int* conind;
char* consense;
~Problem();
};
inline Problem::~Problem() {
for(int j = 0; j < objcnt; ++j) {
delete[] objind[j];
delete[] objcoef[j];
}
delete[] objind;
delete[] objcoef;
delete[] rhs;
delete[] conind;
delete[] consense;
}
#endif /* PROBLEM_H */
## Instruction:
Fix invalid delete[] calls in ~Problem
If objcnt is 0, then no memory has been allocated.
## Code After:
class Problem {
public:
int objcnt; // Number of objectives
double* rhs;
int** objind; // Objective indices
double** objcoef; // Objective coefficients
Sense objsen; // Objective sense. Note that all objectives must have the same
// sense (i.e., either all objectives are to be minimised, or
// all objectives are to be maximised).
int* conind;
char* consense;
Problem();
~Problem();
};
inline Problem::Problem() : objcnt(0) { }
inline Problem::~Problem() {
// If objcnt == 0, then no problem has been assigned and no memory allocated
if (objcnt == 0)
return;
for(int j = 0; j < objcnt; ++j) {
delete[] objind[j];
delete[] objcoef[j];
}
delete[] objind;
delete[] objcoef;
delete[] rhs;
delete[] conind;
delete[] consense;
}
#endif /* PROBLEM_H */
|
...
int* conind;
char* consense;
Problem();
~Problem();
};
inline Problem::Problem() : objcnt(0) { }
inline Problem::~Problem() {
// If objcnt == 0, then no problem has been assigned and no memory allocated
if (objcnt == 0)
return;
for(int j = 0; j < objcnt; ++j) {
delete[] objind[j];
delete[] objcoef[j];
...
|
2aae7b1718bfc21267922f7fe09dcf47be69582b
|
blueprints/aws_rds_instance/delete_aws_rds_instance.py
|
blueprints/aws_rds_instance/delete_aws_rds_instance.py
|
import json
import boto3
from infrastructure.models import Environment
def run(job, logger=None, **kwargs):
service = job.service_set.first()
env_id = service.attributes.get(field__name__startswith='aws_environment').value
env = Environment.objects.get(id=env_id)
rh = env.resource_handler.cast()
job.set_progress('Connecting to AWS...')
client = boto3.client(
'rds',
region_name=env.aws_region,
aws_access_key_id=rh.serviceaccount,
aws_secret_access_key=rh.servicepasswd)
instance_cfv = service.attributes.get(field__name='rds_instance')
instance = json.loads(instance_cfv.value)
identifier = instance['identifier']
job.set_progress('Deleting RDS instance {}...'.format(identifier))
response = client.delete_db_instance(
DBInstanceIdentifier=identifier,
# AWS strongly recommends taking a final snapshot before deleting a DB.
# To do so, either set this to False or let the user choose by making it
# a runtime action input (in that case be sure to set the param type to
# Boolean so users get a dropdown).
SkipFinalSnapshot=True,
)
job.set_progress('RDS instance {0} deleted.'.format(identifier))
return 'SUCCESS', '', ''
|
import json
import boto3
from infrastructure.models import Environment
def run(job, logger=None, **kwargs):
service = job.service_set.first()
# The Environment ID and RDS Instance data dict were stored as attributes on
# this service by a build action.
env_id_cfv = service.attributes.get(field__name__startswith='aws_environment')
instance_cfv = service.attributes.get(field__name='rds_instance')
env = Environment.objects.get(id=env_id_cfv.value)
client = connect_to_rds(env)
instance = json.loads(instance_cfv.value)
identifier = instance['identifier']
job.set_progress('Deleting RDS instance {0}...'.format(identifier))
response = client.delete_db_instance(
DBInstanceIdentifier=identifier,
# AWS strongly recommends taking a final snapshot before deleting a DB.
# To do so, either set this to False or let the user choose by making it
# a runtime action input (in that case be sure to set the param type to
# Boolean so users get a dropdown).
SkipFinalSnapshot=True,
)
job.set_progress('RDS instance {0} deleted.'.format(identifier))
return 'SUCCESS', '', ''
def connect_to_rds(env):
"""
Return boto connection to the RDS in the specified environment's region.
"""
job.set_progress('Connecting to AWS RDS in region {0}.'.format(env.aws_region))
rh = env.resource_handler.cast()
return boto3.client(
'rds',
region_name=env.aws_region,
aws_access_key_id=rh.serviceaccount,
aws_secret_access_key=rh.servicepasswd)
|
Use code consistent with other RDS actions
|
Use code consistent with other RDS actions
[#144244831]
|
Python
|
apache-2.0
|
CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge
|
python
|
## Code Before:
import json
import boto3
from infrastructure.models import Environment
def run(job, logger=None, **kwargs):
service = job.service_set.first()
env_id = service.attributes.get(field__name__startswith='aws_environment').value
env = Environment.objects.get(id=env_id)
rh = env.resource_handler.cast()
job.set_progress('Connecting to AWS...')
client = boto3.client(
'rds',
region_name=env.aws_region,
aws_access_key_id=rh.serviceaccount,
aws_secret_access_key=rh.servicepasswd)
instance_cfv = service.attributes.get(field__name='rds_instance')
instance = json.loads(instance_cfv.value)
identifier = instance['identifier']
job.set_progress('Deleting RDS instance {}...'.format(identifier))
response = client.delete_db_instance(
DBInstanceIdentifier=identifier,
# AWS strongly recommends taking a final snapshot before deleting a DB.
# To do so, either set this to False or let the user choose by making it
# a runtime action input (in that case be sure to set the param type to
# Boolean so users get a dropdown).
SkipFinalSnapshot=True,
)
job.set_progress('RDS instance {0} deleted.'.format(identifier))
return 'SUCCESS', '', ''
## Instruction:
Use code consistent with other RDS actions
[#144244831]
## Code After:
import json
import boto3
from infrastructure.models import Environment
def run(job, logger=None, **kwargs):
service = job.service_set.first()
# The Environment ID and RDS Instance data dict were stored as attributes on
# this service by a build action.
env_id_cfv = service.attributes.get(field__name__startswith='aws_environment')
instance_cfv = service.attributes.get(field__name='rds_instance')
env = Environment.objects.get(id=env_id_cfv.value)
client = connect_to_rds(env)
instance = json.loads(instance_cfv.value)
identifier = instance['identifier']
job.set_progress('Deleting RDS instance {0}...'.format(identifier))
response = client.delete_db_instance(
DBInstanceIdentifier=identifier,
# AWS strongly recommends taking a final snapshot before deleting a DB.
# To do so, either set this to False or let the user choose by making it
# a runtime action input (in that case be sure to set the param type to
# Boolean so users get a dropdown).
SkipFinalSnapshot=True,
)
job.set_progress('RDS instance {0} deleted.'.format(identifier))
return 'SUCCESS', '', ''
def connect_to_rds(env):
"""
Return boto connection to the RDS in the specified environment's region.
"""
job.set_progress('Connecting to AWS RDS in region {0}.'.format(env.aws_region))
rh = env.resource_handler.cast()
return boto3.client(
'rds',
region_name=env.aws_region,
aws_access_key_id=rh.serviceaccount,
aws_secret_access_key=rh.servicepasswd)
|
# ... existing code ...
def run(job, logger=None, **kwargs):
service = job.service_set.first()
# The Environment ID and RDS Instance data dict were stored as attributes on
# this service by a build action.
env_id_cfv = service.attributes.get(field__name__startswith='aws_environment')
instance_cfv = service.attributes.get(field__name='rds_instance')
env = Environment.objects.get(id=env_id_cfv.value)
client = connect_to_rds(env)
instance = json.loads(instance_cfv.value)
identifier = instance['identifier']
job.set_progress('Deleting RDS instance {0}...'.format(identifier))
response = client.delete_db_instance(
DBInstanceIdentifier=identifier,
# AWS strongly recommends taking a final snapshot before deleting a DB.
# ... modified code ...
job.set_progress('RDS instance {0} deleted.'.format(identifier))
return 'SUCCESS', '', ''
def connect_to_rds(env):
"""
Return boto connection to the RDS in the specified environment's region.
"""
job.set_progress('Connecting to AWS RDS in region {0}.'.format(env.aws_region))
rh = env.resource_handler.cast()
return boto3.client(
'rds',
region_name=env.aws_region,
aws_access_key_id=rh.serviceaccount,
aws_secret_access_key=rh.servicepasswd)
# ... rest of the code ...
|
8127c6cf30e3b1410b8194773a07fdfb89435cee
|
gwt-ol3-client/src/test/java/ol/geom/GeometryCollectionTest.java
|
gwt-ol3-client/src/test/java/ol/geom/GeometryCollectionTest.java
|
package ol.geom;
import ol.*;
/**
* A test case for {@link GeometryCollection}.
*
* @author sbaumhekel
*/
public class GeometryCollectionTest extends BaseTestCase {
public void test() {
Geometry[] geoms = new Geometry[] { OLFactory.createPoint(1, 2), OLFactory.createLineString(
new Coordinate[] { OLFactory.createCoordinate(1, 2), OLFactory.createCoordinate(2, 3) }) };
GeometryCollection col = OLFactory.createGeometryCollection(geoms);
assertNotNull(col);
Geometry[] geoms2 = col.getGeometries();
assertNotNull(geoms2);
assertEquals(2, geoms2.length);
Geometry g1 = geoms2[0];
Geometry g2 = geoms2[1];
assertNotNull(g1);
assertNotNull(g2);
assertEquals(Geometry.POINT, g1.getType());
assertEquals(Geometry.LINE_STRING, g2.getType());
}
}
|
package ol.geom;
import ol.Coordinate;
import ol.GwtOL3BaseTestCase;
import ol.OLFactory;
/**
* A test case for {@link GeometryCollection}.
*
* @author sbaumhekel
*/
public class GeometryCollectionTest extends GwtOL3BaseTestCase {
public void test() {
Geometry[] geoms = new Geometry[] { OLFactory.createPoint(1, 2), OLFactory.createLineString(
new Coordinate[] { OLFactory.createCoordinate(1, 2), OLFactory.createCoordinate(2, 3) }) };
GeometryCollection col = OLFactory.createGeometryCollection(geoms);
assertNotNull(col);
Geometry[] geoms2 = col.getGeometries();
assertNotNull(geoms2);
assertEquals(2, geoms2.length);
Geometry g1 = geoms2[0];
Geometry g2 = geoms2[1];
assertNotNull(g1);
assertNotNull(g2);
assertEquals("Point", g1.getType());
assertEquals("LineString", g2.getType());
}
}
|
Update test for GWT 2.8
|
Update test for GWT 2.8
|
Java
|
apache-2.0
|
sebasbaumh/gwt-ol3,sebasbaumh/gwt-ol3,mazlixek/gwt-ol3,TDesjardins/gwt-ol3,mazlixek/gwt-ol3,TDesjardins/GWT-OL3-Playground,TDesjardins/gwt-ol3,TDesjardins/gwt-ol3,TDesjardins/GWT-OL3-Playground,sebasbaumh/gwt-ol3,TDesjardins/GWT-OL3-Playground,mazlixek/gwt-ol3
|
java
|
## Code Before:
package ol.geom;
import ol.*;
/**
* A test case for {@link GeometryCollection}.
*
* @author sbaumhekel
*/
public class GeometryCollectionTest extends BaseTestCase {
public void test() {
Geometry[] geoms = new Geometry[] { OLFactory.createPoint(1, 2), OLFactory.createLineString(
new Coordinate[] { OLFactory.createCoordinate(1, 2), OLFactory.createCoordinate(2, 3) }) };
GeometryCollection col = OLFactory.createGeometryCollection(geoms);
assertNotNull(col);
Geometry[] geoms2 = col.getGeometries();
assertNotNull(geoms2);
assertEquals(2, geoms2.length);
Geometry g1 = geoms2[0];
Geometry g2 = geoms2[1];
assertNotNull(g1);
assertNotNull(g2);
assertEquals(Geometry.POINT, g1.getType());
assertEquals(Geometry.LINE_STRING, g2.getType());
}
}
## Instruction:
Update test for GWT 2.8
## Code After:
package ol.geom;
import ol.Coordinate;
import ol.GwtOL3BaseTestCase;
import ol.OLFactory;
/**
* A test case for {@link GeometryCollection}.
*
* @author sbaumhekel
*/
public class GeometryCollectionTest extends GwtOL3BaseTestCase {
public void test() {
Geometry[] geoms = new Geometry[] { OLFactory.createPoint(1, 2), OLFactory.createLineString(
new Coordinate[] { OLFactory.createCoordinate(1, 2), OLFactory.createCoordinate(2, 3) }) };
GeometryCollection col = OLFactory.createGeometryCollection(geoms);
assertNotNull(col);
Geometry[] geoms2 = col.getGeometries();
assertNotNull(geoms2);
assertEquals(2, geoms2.length);
Geometry g1 = geoms2[0];
Geometry g2 = geoms2[1];
assertNotNull(g1);
assertNotNull(g2);
assertEquals("Point", g1.getType());
assertEquals("LineString", g2.getType());
}
}
|
# ... existing code ...
package ol.geom;
import ol.Coordinate;
import ol.GwtOL3BaseTestCase;
import ol.OLFactory;
/**
* A test case for {@link GeometryCollection}.
*
* @author sbaumhekel
*/
public class GeometryCollectionTest extends GwtOL3BaseTestCase {
public void test() {
Geometry[] geoms = new Geometry[] { OLFactory.createPoint(1, 2), OLFactory.createLineString(
new Coordinate[] { OLFactory.createCoordinate(1, 2), OLFactory.createCoordinate(2, 3) }) };
GeometryCollection col = OLFactory.createGeometryCollection(geoms);
assertNotNull(col);
Geometry[] geoms2 = col.getGeometries();
assertNotNull(geoms2);
assertEquals(2, geoms2.length);
Geometry g1 = geoms2[0];
Geometry g2 = geoms2[1];
assertNotNull(g1);
assertNotNull(g2);
assertEquals("Point", g1.getType());
assertEquals("LineString", g2.getType());
}
}
# ... rest of the code ...
|
e94503e25bff0ba986c28ce3f16636b3bb9f2c3d
|
green_django/__init__.py
|
green_django/__init__.py
|
import sys
from utils import module_exists
from gevent import monkey
def make_django_green():
monkey.patch_all()
if module_exists('psycogreen'):
from psycogreen.gevent.psyco_gevent import make_psycopg_green
make_psycopg_green()
if module_exists('pymysql'):
import pymysql
pymysql.install_as_MySQLdb()
if module_exists('zmq'):
from gevent_zeromq import zmq
sys.modules["zmq"] = zmq
|
import sys
from utils import module_exists
from gevent import monkey
def make_django_green():
monkey.patch_all()
if module_exists('psycogreen'):
from psycogreen.gevent.psyco_gevent import make_psycopg_green
make_psycopg_green()
if module_exists('pymysql'):
import pymysql
pymysql.install_as_MySQLdb()
if module_exists('gevent_zeromq'):
from gevent_zeromq import zmq
sys.modules["zmq"] = zmq
|
Check for greened package - consistency
|
Check for greened package - consistency
|
Python
|
mit
|
philipn/green-monkey
|
python
|
## Code Before:
import sys
from utils import module_exists
from gevent import monkey
def make_django_green():
monkey.patch_all()
if module_exists('psycogreen'):
from psycogreen.gevent.psyco_gevent import make_psycopg_green
make_psycopg_green()
if module_exists('pymysql'):
import pymysql
pymysql.install_as_MySQLdb()
if module_exists('zmq'):
from gevent_zeromq import zmq
sys.modules["zmq"] = zmq
## Instruction:
Check for greened package - consistency
## Code After:
import sys
from utils import module_exists
from gevent import monkey
def make_django_green():
monkey.patch_all()
if module_exists('psycogreen'):
from psycogreen.gevent.psyco_gevent import make_psycopg_green
make_psycopg_green()
if module_exists('pymysql'):
import pymysql
pymysql.install_as_MySQLdb()
if module_exists('gevent_zeromq'):
from gevent_zeromq import zmq
sys.modules["zmq"] = zmq
|
...
import pymysql
pymysql.install_as_MySQLdb()
if module_exists('gevent_zeromq'):
from gevent_zeromq import zmq
sys.modules["zmq"] = zmq
...
|
73caeecd963326f4789eb3dc484e59ffb475e12f
|
blankspot_stats.py
|
blankspot_stats.py
|
import MapGardening
import optparse
usage = "usage: %prog [options]"
p = optparse.OptionParser(usage)
p.add_option('--place', '-p',
default="all"
)
options, arguments = p.parse_args()
possible_tables = [
'hist_point',
'hist_point_250m',
'hist_point_500m',
'hist_point_1000m',
'hist_point_proximity',
]
if options.place == "all":
places = MapGardening.get_all_places()
else:
placename = options.place
place = MapGardening.get_place(placename)
places = {placename: place}
MapGardening.init_logging()
for placename in places.keys():
print "printing blankspot info for", placename
MapGardening.init_db(places[placename]['dbname'])
for table in possible_tables:
nodetable = MapGardening.NodeTable(table) # Table may not exist, but object will still be created
nodetable.get_blankspot_stats()
MapGardening.disconnect_db()
|
import MapGardening
import optparse
usage = "usage: %prog [options]"
p = optparse.OptionParser(usage)
p.add_option('--place', '-p',
default="all"
)
options, arguments = p.parse_args()
possible_tables = [
'blankspots_1000_b',
]
if options.place == "all":
places = MapGardening.get_all_places()
else:
placename = options.place
place = MapGardening.get_place(placename)
places = {placename: place}
MapGardening.init_logging()
for placename in places.keys():
print "printing blankspot info for", placename
MapGardening.init_db(places[placename]['dbname'])
for table in possible_tables:
nodetable = MapGardening.NodeTable(table) # Table may not exist, but object will still be created
nodetable.get_blankspot_stats()
MapGardening.disconnect_db()
|
Add docstring, change tables searched
|
Add docstring, change tables searched
|
Python
|
mit
|
almccon/mapgardening,almccon/mapgardening,almccon/mapgardening,almccon/mapgardening
|
python
|
## Code Before:
import MapGardening
import optparse
usage = "usage: %prog [options]"
p = optparse.OptionParser(usage)
p.add_option('--place', '-p',
default="all"
)
options, arguments = p.parse_args()
possible_tables = [
'hist_point',
'hist_point_250m',
'hist_point_500m',
'hist_point_1000m',
'hist_point_proximity',
]
if options.place == "all":
places = MapGardening.get_all_places()
else:
placename = options.place
place = MapGardening.get_place(placename)
places = {placename: place}
MapGardening.init_logging()
for placename in places.keys():
print "printing blankspot info for", placename
MapGardening.init_db(places[placename]['dbname'])
for table in possible_tables:
nodetable = MapGardening.NodeTable(table) # Table may not exist, but object will still be created
nodetable.get_blankspot_stats()
MapGardening.disconnect_db()
## Instruction:
Add docstring, change tables searched
## Code After:
import MapGardening
import optparse
usage = "usage: %prog [options]"
p = optparse.OptionParser(usage)
p.add_option('--place', '-p',
default="all"
)
options, arguments = p.parse_args()
possible_tables = [
'blankspots_1000_b',
]
if options.place == "all":
places = MapGardening.get_all_places()
else:
placename = options.place
place = MapGardening.get_place(placename)
places = {placename: place}
MapGardening.init_logging()
for placename in places.keys():
print "printing blankspot info for", placename
MapGardening.init_db(places[placename]['dbname'])
for table in possible_tables:
nodetable = MapGardening.NodeTable(table) # Table may not exist, but object will still be created
nodetable.get_blankspot_stats()
MapGardening.disconnect_db()
|
# ... existing code ...
possible_tables = [
'blankspots_1000_b',
]
if options.place == "all":
# ... rest of the code ...
|
441da7a34058733c298c81dbd97a35fca6e538e0
|
pgpdump/__main__.py
|
pgpdump/__main__.py
|
import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
|
import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
|
Remove cProfile inclusion, always read file as binary
|
Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <[email protected]>
|
Python
|
bsd-3-clause
|
toofishes/python-pgpdump
|
python
|
## Code Before:
import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
## Instruction:
Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <[email protected]>
## Code After:
import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
|
// ... existing code ...
import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
// ... modified code ...
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
...
parsefile(filename)
if __name__ == '__main__':
main()
// ... rest of the code ...
|
9e85483d7baef82e7081639e2df746ed80c38418
|
tests/test_wheeler.py
|
tests/test_wheeler.py
|
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
if __name__ == '__main__':
unittest.main()
|
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
Cover the line that handles the pip<=1.5.2 error case.
|
Cover the line that handles the pip<=1.5.2 error case.
|
Python
|
bsd-3-clause
|
tylerdave/devpi-builder
|
python
|
## Code Before:
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
if __name__ == '__main__':
unittest.main()
## Instruction:
Cover the line that handles the pip<=1.5.2 error case.
## Code After:
import os.path as path
import unittest
from devpi_builder import wheeler
class WheelTest(unittest.TestCase):
def test_build(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertRegexpMatches(wheel_file, '\.whl$')
self.assert_(path.exists(wheel_file))
def test_cleans_up_created_files(self):
with wheeler.Builder() as builder:
wheel_file = builder('progressbar', '2.2')
self.assertFalse(path.exists(wheel_file))
def test_provides_file_that_is_already_a_wheel(self):
with wheeler.Builder() as builder:
wheel_file = builder('wheel', '0.24')
self.assert_(path.exists(wheel_file))
def test_throws_custom_on_build_failure(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
|
...
with self.assertRaises(wheeler.BuildError):
builder('package_that_hopefully_does_not_exist', '99.999')
def test_look_for_non_existing_wheel(self):
with wheeler.Builder() as builder:
with self.assertRaises(wheeler.BuildError):
builder('nothing_can_be_found', '1.1')
if __name__ == '__main__':
unittest.main()
...
|
7eb580d11dc8506cf656021d12884562d1a1b823
|
dumper/site.py
|
dumper/site.py
|
from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
|
from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
|
Use keyword based `format` to maintain 2.6 compatibility
|
Use keyword based `format` to maintain 2.6 compatibility
|
Python
|
mit
|
saulshanabrook/django-dumper
|
python
|
## Code Before:
from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
## Instruction:
Use keyword based `format` to maintain 2.6 compatibility
## Code After:
from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
|
...
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
...
|
d5a00553101dd3d431dd79494b9b57cfa56cb4be
|
worker.py
|
worker.py
|
import os
from redis import Redis
from rq import Queue
queue_name = 'test' if os.environ.get('SECUREDROP_ENV') == 'test' else 'default'
q = Queue(name=queue_name, connection=Redis())
def enqueue(*args, **kwargs):
q.enqueue(*args, **kwargs)
|
import os
from redis import Redis
from rq import Queue
queue_name = 'test' if os.environ.get('SECUREDROP_ENV') == 'test' else 'default'
# `srm` can take a long time on large files, so allow it run for up to an hour
q = Queue(name=queue_name, connection=Redis(), default_timeout=3600)
def enqueue(*args, **kwargs):
q.enqueue(*args, **kwargs)
|
Increase job timeout for securely deleting files
|
Increase job timeout for securely deleting files
|
Python
|
agpl-3.0
|
mark-in/securedrop-app-code,mark-in/securedrop-app-code,mark-in/securedrop-app-code,mark-in/securedrop-app-code
|
python
|
## Code Before:
import os
from redis import Redis
from rq import Queue
queue_name = 'test' if os.environ.get('SECUREDROP_ENV') == 'test' else 'default'
q = Queue(name=queue_name, connection=Redis())
def enqueue(*args, **kwargs):
q.enqueue(*args, **kwargs)
## Instruction:
Increase job timeout for securely deleting files
## Code After:
import os
from redis import Redis
from rq import Queue
queue_name = 'test' if os.environ.get('SECUREDROP_ENV') == 'test' else 'default'
# `srm` can take a long time on large files, so allow it run for up to an hour
q = Queue(name=queue_name, connection=Redis(), default_timeout=3600)
def enqueue(*args, **kwargs):
q.enqueue(*args, **kwargs)
|
...
queue_name = 'test' if os.environ.get('SECUREDROP_ENV') == 'test' else 'default'
# `srm` can take a long time on large files, so allow it run for up to an hour
q = Queue(name=queue_name, connection=Redis(), default_timeout=3600)
def enqueue(*args, **kwargs):
q.enqueue(*args, **kwargs)
...
|
d6a03fad6c9280981ae3beee24de89bd6361bcc9
|
dumbrepl.py
|
dumbrepl.py
|
if __name__ == "__main__":
import pycket.test.testhelper as th
th.dumb_repl()
|
if __name__ == "__main__":
import pycket.values
import pycket.config
from pycket.env import w_global_config
#w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
th.dumb_repl()
|
Make sure things are loaded right.
|
Make sure things are loaded right.
|
Python
|
mit
|
samth/pycket,pycket/pycket,pycket/pycket,samth/pycket,samth/pycket,pycket/pycket
|
python
|
## Code Before:
if __name__ == "__main__":
import pycket.test.testhelper as th
th.dumb_repl()
## Instruction:
Make sure things are loaded right.
## Code After:
if __name__ == "__main__":
import pycket.values
import pycket.config
from pycket.env import w_global_config
#w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
th.dumb_repl()
|
// ... existing code ...
if __name__ == "__main__":
import pycket.values
import pycket.config
from pycket.env import w_global_config
#w_global_config.set_linklet_mode_off()
import pycket.test.testhelper as th
th.dumb_repl()
// ... rest of the code ...
|
118eabf049db8804635001b2348fcb81c8a2a4f4
|
openstack_dashboard/dashboards/admin/routers/ports/tables.py
|
openstack_dashboard/dashboards/admin/routers/ports/tables.py
|
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.networks.ports \
import tables as networks_tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
DISPLAY_CHOICES = (
("UP", pgettext_lazy("Admin state of a Network", u"UP")),
("DOWN", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
class PortsTable(tables.DataTable):
name = tables.Column("name",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
fixed_ips = tables.Column(networks_tables.get_fixed_ips,
verbose_name=_("Fixed IPs"))
status = tables.Column("status", verbose_name=_("Status"))
device_owner = tables.Column(routers_tables.get_device_owner,
verbose_name=_("Type"))
admin_state = tables.Column("admin_state",
verbose_name=_("Admin State"),
display_choices=DISPLAY_CHOICES)
def get_object_display(self, port):
return port.id
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
|
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
class PortsTable(routers_tables.PortsTable):
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
|
Fix router details's name empty and change inheritance project table
|
Fix router details's name empty and change inheritance project table
In admin router details page, the name column is empty,
change to if no name show id. And change to inheritance
from port table of project.
Change-Id: I54d4ad95bd04db2432eb47f848917a452c5f54e9
Closes-bug:#1417948
|
Python
|
apache-2.0
|
j4/horizon,yeming233/horizon,henaras/horizon,yeming233/horizon,damien-dg/horizon,tqtran7/horizon,BiznetGIO/horizon,Hodorable/0602,tqtran7/horizon,RudoCris/horizon,dan1/horizon-x509,agileblaze/OpenStackTwoFactorAuthentication,kfox1111/horizon,maestro-hybrid-cloud/horizon,NeCTAR-RC/horizon,redhat-openstack/horizon,Tesora/tesora-horizon,vladryk/horizon,agileblaze/OpenStackTwoFactorAuthentication,yjxtogo/horizon,NCI-Cloud/horizon,wolverineav/horizon,Solinea/horizon,ChameleonCloud/horizon,saydulk/horizon,bac/horizon,mdavid/horizon,damien-dg/horizon,tellesnobrega/horizon,tqtran7/horizon,redhat-openstack/horizon,ChameleonCloud/horizon,openstack/horizon,Tesora/tesora-horizon,Metaswitch/horizon,icloudrnd/automation_tools,BiznetGIO/horizon,django-leonardo/horizon,tellesnobrega/horizon,luhanhan/horizon,dan1/horizon-proto,Tesora/tesora-horizon,mandeepdhami/horizon,tellesnobrega/horizon,yeming233/horizon,blueboxgroup/horizon,RudoCris/horizon,icloudrnd/automation_tools,mandeepdhami/horizon,Solinea/horizon,newrocknj/horizon,sandvine/horizon,endorphinl/horizon-fork,Mirantis/mos-horizon,Metaswitch/horizon,VaneCloud/horizon,FNST-OpenStack/horizon,liyitest/rr,pranavtendolkr/horizon,philoniare/horizon,coreycb/horizon,mandeepdhami/horizon,icloudrnd/automation_tools,henaras/horizon,redhat-cip/horizon,CiscoSystems/horizon,kfox1111/horizon,dan1/horizon-x509,Mirantis/mos-horizon,Mirantis/mos-horizon,Tesora/tesora-horizon,yeming233/horizon,promptworks/horizon,yjxtogo/horizon,xinwu/horizon,newrocknj/horizon,BiznetGIO/horizon,promptworks/horizon,pranavtendolkr/horizon,newrocknj/horizon,NeCTAR-RC/horizon,pranavtendolkr/horizon,CiscoSystems/horizon,vladryk/horizon,philoniare/horizon,anthonydillon/horizon,luhanhan/horizon,luhanhan/horizon,xinwu/horizon,sandvine/horizon,eayunstack/horizon,xinwu/horizon,bac/horizon,dan1/horizon-proto,j4/horizon,eayunstack/horizon,izadorozhna/dashboard_integration_tests,newrocknj/horizon,henaras/horizon,philoniare/horizon,anthonydillon/horizon,Metaswitch/horizon,mda
vid/horizon,kfox1111/horizon,blueboxgroup/horizon,icloudrnd/automation_tools,gerrive/horizon,tqtran7/horizon,j4/horizon,django-leonardo/horizon,redhat-cip/horizon,endorphinl/horizon,noironetworks/horizon,dan1/horizon-x509,tellesnobrega/horizon,agileblaze/OpenStackTwoFactorAuthentication,endorphinl/horizon-fork,Dark-Hacker/horizon,ChameleonCloud/horizon,takeshineshiro/horizon,coreycb/horizon,mdavid/horizon,anthonydillon/horizon,Dark-Hacker/horizon,NCI-Cloud/horizon,karthik-suresh/horizon,luhanhan/horizon,blueboxgroup/horizon,philoniare/horizon,Daniex/horizon,endorphinl/horizon,RudoCris/horizon,bigswitch/horizon,redhat-cip/horizon,FNST-OpenStack/horizon,openstack/horizon,endorphinl/horizon-fork,noironetworks/horizon,CiscoSystems/horizon,noironetworks/horizon,saydulk/horizon,wangxiangyu/horizon,openstack/horizon,NeCTAR-RC/horizon,davidcusatis/horizon,NCI-Cloud/horizon,wolverineav/horizon,wolverineav/horizon,redhat-openstack/horizon,doug-fish/horizon,gerrive/horizon,liyitest/rr,bac/horizon,wangxiangyu/horizon,endorphinl/horizon,yjxtogo/horizon,wolverineav/horizon,agileblaze/OpenStackTwoFactorAuthentication,endorphinl/horizon,FNST-OpenStack/horizon,takeshineshiro/horizon,BiznetGIO/horizon,mdavid/horizon,dan1/horizon-proto,takeshineshiro/horizon,henaras/horizon,xinwu/horizon,saydulk/horizon,maestro-hybrid-cloud/horizon,j4/horizon,davidcusatis/horizon,bigswitch/horizon,coreycb/horizon,karthik-suresh/horizon,wangxiangyu/horizon,VaneCloud/horizon,watonyweng/horizon,vladryk/horizon,promptworks/horizon,mandeepdhami/horizon,maestro-hybrid-cloud/horizon,idjaw/horizon,dan1/horizon-proto,endorphinl/horizon-fork,NeCTAR-RC/horizon,davidcusatis/horizon,Daniex/horizon,Mirantis/mos-horizon,saydulk/horizon,RudoCris/horizon,django-leonardo/horizon,gerrive/horizon,Daniex/horizon,izadorozhna/dashboard_integration_tests,liyitest/rr,yjxtogo/horizon,idjaw/horizon,FNST-OpenStack/horizon,karthik-suresh/horizon,bigswitch/horizon,idjaw/horizon,Solinea/horizon,damien-dg/horizon,VaneCloud/horizon,m
aestro-hybrid-cloud/horizon,sandvine/horizon,eayunstack/horizon,karthik-suresh/horizon,coreycb/horizon,vladryk/horizon,Hodorable/0602,CiscoSystems/horizon,ChameleonCloud/horizon,gerrive/horizon,openstack/horizon,dan1/horizon-x509,Metaswitch/horizon,Dark-Hacker/horizon,redhat-openstack/horizon,django-leonardo/horizon,liyitest/rr,Solinea/horizon,wangxiangyu/horizon,Hodorable/0602,damien-dg/horizon,bigswitch/horizon,VaneCloud/horizon,idjaw/horizon,watonyweng/horizon,sandvine/horizon,promptworks/horizon,doug-fish/horizon,pranavtendolkr/horizon,bac/horizon,Hodorable/0602,doug-fish/horizon,doug-fish/horizon,blueboxgroup/horizon,watonyweng/horizon,watonyweng/horizon,takeshineshiro/horizon,noironetworks/horizon,Dark-Hacker/horizon,anthonydillon/horizon,Daniex/horizon,davidcusatis/horizon,kfox1111/horizon,NCI-Cloud/horizon,redhat-cip/horizon
|
python
|
## Code Before:
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.networks.ports \
import tables as networks_tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
DISPLAY_CHOICES = (
("UP", pgettext_lazy("Admin state of a Network", u"UP")),
("DOWN", pgettext_lazy("Admin state of a Network", u"DOWN")),
)
class PortsTable(tables.DataTable):
name = tables.Column("name",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
fixed_ips = tables.Column(networks_tables.get_fixed_ips,
verbose_name=_("Fixed IPs"))
status = tables.Column("status", verbose_name=_("Status"))
device_owner = tables.Column(routers_tables.get_device_owner,
verbose_name=_("Type"))
admin_state = tables.Column("admin_state",
verbose_name=_("Admin State"),
display_choices=DISPLAY_CHOICES)
def get_object_display(self, port):
return port.id
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
## Instruction:
Fix router details's name empty and change inheritance project table
In admin router details page, the name column is empty,
change to if no name show id. And change to inheritance
from port table of project.
Change-Id: I54d4ad95bd04db2432eb47f848917a452c5f54e9
Closes-bug:#1417948
## Code After:
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
class PortsTable(routers_tables.PortsTable):
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
class Meta(object):
name = "interfaces"
verbose_name = _("Interfaces")
|
...
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.routers.ports \
import tables as routers_tables
class PortsTable(routers_tables.PortsTable):
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:admin:networks:ports:detail")
class Meta(object):
name = "interfaces"
...
|
97a490db75f0a4976199365c3f654ba8cdb9a781
|
01_Built-in_Types/tuple.py
|
01_Built-in_Types/tuple.py
|
import sys
import pickle
# Check argument
if len(sys.argv) != 2:
print("%s filename" % sys.argv[0])
raise SystemExit(1)
# Write tuples
file = open(sys.argv[1], "wb");
line = []
while True:
print("Enter name, age, score (ex: zzz, 16, 90) or quit");
line = sys.stdin.readline()
if line == "quit\n":
break
raws = line.split(",")
name = raws[0]
age = int(raws[1])
score = int(raws[2])
record = (name, age, score)
pickle.dump(record, file)
file.close()
# Read back
file = open(sys.argv[1], "rb");
while True:
try:
record = pickle.load(file)
print record
name, age, score= record
print("name = %s" % name)
print("name = %d" % age)
print("name = %d" % score)
except (EOFError):
break
file.close()
|
import sys
import pickle
# Test zip, and format in print
names = ["xxx", "yyy", "zzz"]
ages = [18, 19, 20]
persons = zip(names, ages)
for name, age in persons:
print "{0}'s age is {1}".format(name, age)
# Check argument
if len(sys.argv) != 2:
print("%s filename" % sys.argv[0])
raise SystemExit(1)
# Write tuples
file = open(sys.argv[1], "wb");
line = []
while True:
print("Enter name, age, score (ex: zzz, 16, 90) or quit");
line = sys.stdin.readline()
if line == "quit\n":
break
raws = line.split(",")
name = raws[0]
age = int(raws[1])
score = int(raws[2])
record = (name, age, score)
pickle.dump(record, file)
file.close()
# Read back
file = open(sys.argv[1], "rb");
while True:
try:
record = pickle.load(file)
print record
name, age, score= record
print("name = %s" % name)
print("name = %d" % age)
print("name = %d" % score)
except (EOFError):
break
file.close()
|
Test zip, and print format
|
Test zip, and print format
|
Python
|
bsd-2-clause
|
zzz0072/Python_Exercises,zzz0072/Python_Exercises
|
python
|
## Code Before:
import sys
import pickle
# Check argument
if len(sys.argv) != 2:
print("%s filename" % sys.argv[0])
raise SystemExit(1)
# Write tuples
file = open(sys.argv[1], "wb");
line = []
while True:
print("Enter name, age, score (ex: zzz, 16, 90) or quit");
line = sys.stdin.readline()
if line == "quit\n":
break
raws = line.split(",")
name = raws[0]
age = int(raws[1])
score = int(raws[2])
record = (name, age, score)
pickle.dump(record, file)
file.close()
# Read back
file = open(sys.argv[1], "rb");
while True:
try:
record = pickle.load(file)
print record
name, age, score= record
print("name = %s" % name)
print("name = %d" % age)
print("name = %d" % score)
except (EOFError):
break
file.close()
## Instruction:
Test zip, and print format
## Code After:
import sys
import pickle
# Test zip, and format in print
names = ["xxx", "yyy", "zzz"]
ages = [18, 19, 20]
persons = zip(names, ages)
for name, age in persons:
print "{0}'s age is {1}".format(name, age)
# Check argument
if len(sys.argv) != 2:
print("%s filename" % sys.argv[0])
raise SystemExit(1)
# Write tuples
file = open(sys.argv[1], "wb");
line = []
while True:
print("Enter name, age, score (ex: zzz, 16, 90) or quit");
line = sys.stdin.readline()
if line == "quit\n":
break
raws = line.split(",")
name = raws[0]
age = int(raws[1])
score = int(raws[2])
record = (name, age, score)
pickle.dump(record, file)
file.close()
# Read back
file = open(sys.argv[1], "rb");
while True:
try:
record = pickle.load(file)
print record
name, age, score= record
print("name = %s" % name)
print("name = %d" % age)
print("name = %d" % score)
except (EOFError):
break
file.close()
|
...
import sys
import pickle
# Test zip, and format in print
names = ["xxx", "yyy", "zzz"]
ages = [18, 19, 20]
persons = zip(names, ages)
for name, age in persons:
print "{0}'s age is {1}".format(name, age)
# Check argument
if len(sys.argv) != 2:
...
|
ba49a9b3344f30f5bd3ea05144546e6a8a763ef0
|
tests/test_cli/test_config.py
|
tests/test_cli/test_config.py
|
from __future__ import absolute_import, division, print_function
from mock import patch
from tests.test_cli.utils import BaseCommandTestCase
from polyaxon_cli.cli.config import config
class TestConfigManager(BaseCommandTestCase):
@patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config')
def test_config_list(self, get_user):
self.runner.invoke(config, ['--list'])
assert get_user.call_count == 1
|
from __future__ import absolute_import, division, print_function
from mock import patch
from tests.test_cli.utils import BaseCommandTestCase
from polyaxon_cli.cli.config import config
class TestConfigManager(BaseCommandTestCase):
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
def test_config_list_checks_initialized(self, is_initialized):
is_initialized.return_value = False
self.runner.invoke(config, ['--list'])
assert is_initialized.call_count == 1
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
@patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG')
def test_config_list_gets_default_config(self, default_config, is_initialized):
is_initialized.return_value = False
self.runner.invoke(config, ['--list'])
assert default_config.call_count == 1
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
@patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config')
def test_config_list_gets_file_config(self, get_config, is_initialized):
is_initialized.return_value = True
self.runner.invoke(config, ['--list'])
assert get_config.call_count == 1
|
Add more tests for config list
|
Add more tests for config list
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
python
|
## Code Before:
from __future__ import absolute_import, division, print_function
from mock import patch
from tests.test_cli.utils import BaseCommandTestCase
from polyaxon_cli.cli.config import config
class TestConfigManager(BaseCommandTestCase):
@patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config')
def test_config_list(self, get_user):
self.runner.invoke(config, ['--list'])
assert get_user.call_count == 1
## Instruction:
Add more tests for config list
## Code After:
from __future__ import absolute_import, division, print_function
from mock import patch
from tests.test_cli.utils import BaseCommandTestCase
from polyaxon_cli.cli.config import config
class TestConfigManager(BaseCommandTestCase):
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
def test_config_list_checks_initialized(self, is_initialized):
is_initialized.return_value = False
self.runner.invoke(config, ['--list'])
assert is_initialized.call_count == 1
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
@patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG')
def test_config_list_gets_default_config(self, default_config, is_initialized):
is_initialized.return_value = False
self.runner.invoke(config, ['--list'])
assert default_config.call_count == 1
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
@patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config')
def test_config_list_gets_file_config(self, get_config, is_initialized):
is_initialized.return_value = True
self.runner.invoke(config, ['--list'])
assert get_config.call_count == 1
|
...
class TestConfigManager(BaseCommandTestCase):
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
def test_config_list_checks_initialized(self, is_initialized):
is_initialized.return_value = False
self.runner.invoke(config, ['--list'])
assert is_initialized.call_count == 1
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
@patch('polyaxon_cli.managers.config.GlobalConfigManager.CONFIG')
def test_config_list_gets_default_config(self, default_config, is_initialized):
is_initialized.return_value = False
self.runner.invoke(config, ['--list'])
assert default_config.call_count == 1
@patch('polyaxon_cli.managers.config.GlobalConfigManager.is_initialized')
@patch('polyaxon_cli.managers.config.GlobalConfigManager.get_config')
def test_config_list_gets_file_config(self, get_config, is_initialized):
is_initialized.return_value = True
self.runner.invoke(config, ['--list'])
assert get_config.call_count == 1
...
|
d7e9eba6fb3628f0736bd468ae76e05099b9d651
|
space/decorators.py
|
space/decorators.py
|
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from incubator.settings import STATUS_SECRETS
def one_or_zero(arg):
"""Typecast to 1 or 0"""
if arg == '1':
return 1
elif arg == '0':
return 0
raise ValueError("not one or zero")
def private_api(**required_params):
"""
Filter incoming private API requests, and perform parameter validation and
extraction
"""
def outer(some_view):
@csrf_exempt
def inner(request, *args, **kwargs):
if request.method != 'POST':
return HttpResponseBadRequest("Only POST is allowed")
if 'secret' not in request.POST.keys():
return HttpResponseBadRequest(
"You must query this endpoint with a secret.")
if request.POST['secret'] not in STATUS_SECRETS:
message = 'Bad secret {} is not in the allowed list'.format(
request.POST['secret'])
return HttpResponseForbidden(message)
params = {}
for name, typecast in required_params.items():
if name not in request.POST.keys():
return HttpResponseBadRequest(
"Parameter %s is required" % name)
try:
params[name] = typecast(request.POST[name])
except ValueError:
return HttpResponseBadRequest(
"Did not understood %s=%s" % (name, request.POST[name]))
return some_view(request, **params)
return inner
return outer
|
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
def one_or_zero(arg):
"""Typecast to 1 or 0"""
if arg == '1':
return 1
elif arg == '0':
return 0
raise ValueError("not one or zero")
def private_api(**required_params):
"""
Filter incoming private API requests, and perform parameter validation and
extraction
"""
def outer(some_view):
@csrf_exempt
def inner(request, *args, **kwargs):
if request.method != 'POST':
return HttpResponseBadRequest("Only POST is allowed")
if 'secret' not in request.POST.keys():
return HttpResponseBadRequest(
"You must query this endpoint with a secret.")
if request.POST['secret'] not in settings.STATUS_SECRETS:
message = 'Bad secret {} is not in the allowed list'.format(
request.POST['secret'])
return HttpResponseForbidden(message)
params = {}
for name, typecast in required_params.items():
if name not in request.POST.keys():
return HttpResponseBadRequest(
"Parameter %s is required" % name)
try:
params[name] = typecast(request.POST[name])
except ValueError:
return HttpResponseBadRequest(
"Did not understood %s=%s" % (name, request.POST[name]))
return some_view(request, **params)
return inner
return outer
|
Use from django.conf import settings
|
Use from django.conf import settings
|
Python
|
agpl-3.0
|
UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator
|
python
|
## Code Before:
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from incubator.settings import STATUS_SECRETS
def one_or_zero(arg):
"""Typecast to 1 or 0"""
if arg == '1':
return 1
elif arg == '0':
return 0
raise ValueError("not one or zero")
def private_api(**required_params):
"""
Filter incoming private API requests, and perform parameter validation and
extraction
"""
def outer(some_view):
@csrf_exempt
def inner(request, *args, **kwargs):
if request.method != 'POST':
return HttpResponseBadRequest("Only POST is allowed")
if 'secret' not in request.POST.keys():
return HttpResponseBadRequest(
"You must query this endpoint with a secret.")
if request.POST['secret'] not in STATUS_SECRETS:
message = 'Bad secret {} is not in the allowed list'.format(
request.POST['secret'])
return HttpResponseForbidden(message)
params = {}
for name, typecast in required_params.items():
if name not in request.POST.keys():
return HttpResponseBadRequest(
"Parameter %s is required" % name)
try:
params[name] = typecast(request.POST[name])
except ValueError:
return HttpResponseBadRequest(
"Did not understood %s=%s" % (name, request.POST[name]))
return some_view(request, **params)
return inner
return outer
## Instruction:
Use from django.conf import settings
## Code After:
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
def one_or_zero(arg):
"""Typecast to 1 or 0"""
if arg == '1':
return 1
elif arg == '0':
return 0
raise ValueError("not one or zero")
def private_api(**required_params):
"""
Filter incoming private API requests, and perform parameter validation and
extraction
"""
def outer(some_view):
@csrf_exempt
def inner(request, *args, **kwargs):
if request.method != 'POST':
return HttpResponseBadRequest("Only POST is allowed")
if 'secret' not in request.POST.keys():
return HttpResponseBadRequest(
"You must query this endpoint with a secret.")
if request.POST['secret'] not in settings.STATUS_SECRETS:
message = 'Bad secret {} is not in the allowed list'.format(
request.POST['secret'])
return HttpResponseForbidden(message)
params = {}
for name, typecast in required_params.items():
if name not in request.POST.keys():
return HttpResponseBadRequest(
"Parameter %s is required" % name)
try:
params[name] = typecast(request.POST[name])
except ValueError:
return HttpResponseBadRequest(
"Did not understood %s=%s" % (name, request.POST[name]))
return some_view(request, **params)
return inner
return outer
|
...
from django.http import HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
def one_or_zero(arg):
...
return HttpResponseBadRequest(
"You must query this endpoint with a secret.")
if request.POST['secret'] not in settings.STATUS_SECRETS:
message = 'Bad secret {} is not in the allowed list'.format(
request.POST['secret'])
return HttpResponseForbidden(message)
...
|
592c98eefdeaba3bf5d1be29ea497503282abf78
|
src/shk-trace/src/file_descriptor.h
|
src/shk-trace/src/file_descriptor.h
|
namespace shk {
namespace detail {
void closeFd(int fd);
}
using FileDescriptor = RAIIHelper<int, void, detail::closeFd, -1>;
} // namespace shk
|
namespace shk {
namespace detail {
void closeFd(int fd);
} // namespace detail
using FileDescriptor = RAIIHelper<int, void, detail::closeFd, -1>;
} // namespace shk
|
Add missing namespace closing comment
|
Style: Add missing namespace closing comment
|
C
|
apache-2.0
|
per-gron/shuriken,per-gron/shuriken,per-gron/shuriken,per-gron/shuriken
|
c
|
## Code Before:
namespace shk {
namespace detail {
void closeFd(int fd);
}
using FileDescriptor = RAIIHelper<int, void, detail::closeFd, -1>;
} // namespace shk
## Instruction:
Style: Add missing namespace closing comment
## Code After:
namespace shk {
namespace detail {
void closeFd(int fd);
} // namespace detail
using FileDescriptor = RAIIHelper<int, void, detail::closeFd, -1>;
} // namespace shk
|
...
void closeFd(int fd);
} // namespace detail
using FileDescriptor = RAIIHelper<int, void, detail::closeFd, -1>;
...
|
d612e3122fb3e0b100990cd1fd805285f3cda096
|
f-tep-api/src/main/java/com/cgi/eoss/ftep/api/projections/ShortJob.java
|
f-tep-api/src/main/java/com/cgi/eoss/ftep/api/projections/ShortJob.java
|
package com.cgi.eoss.ftep.api.projections;
import com.cgi.eoss.ftep.api.security.FtepPermission;
import com.cgi.eoss.ftep.model.Job;
import com.cgi.eoss.ftep.model.JobStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import java.time.LocalDateTime;
/**
* <p>Default JSON projection for embedded {@link Job}s. Embeds the owner as a ShortUser.</p>
*/
@Projection(name = "shortFtepService", types = {Job.class})
public interface ShortJob extends EmbeddedId {
String getExtId();
ShortUser getOwner();
JobStatus getStatus();
String getGuiUrl();
String getStage();
LocalDateTime getStartTime();
LocalDateTime getEndTime();
@Value("#{@ftepSecurityService.getCurrentPermission(target.class, target.id)}")
FtepPermission getAccessLevel();
}
|
package com.cgi.eoss.ftep.api.projections;
import com.cgi.eoss.ftep.api.security.FtepPermission;
import com.cgi.eoss.ftep.model.Job;
import com.cgi.eoss.ftep.model.JobStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import java.time.LocalDateTime;
/**
* <p>Default JSON projection for embedded {@link Job}s. Embeds the owner as a ShortUser.</p>
*/
@Projection(name = "shortFtepService", types = {Job.class})
public interface ShortJob extends EmbeddedId {
String getExtId();
ShortUser getOwner();
JobStatus getStatus();
String getGuiUrl();
String getStage();
LocalDateTime getStartTime();
LocalDateTime getEndTime();
@Value("#{target.config.service.name}")
String getServiceName();
@Value("#{@ftepSecurityService.getCurrentPermission(target.class, target.id)}")
FtepPermission getAccessLevel();
}
|
Add service name to embedded Job representations
|
Add service name to embedded Job representations
Change-Id: I7bb7e8dcbb85f563267dcd9ad823365c8a4efa9c
|
Java
|
agpl-3.0
|
cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep,cgi-eoss/ftep
|
java
|
## Code Before:
package com.cgi.eoss.ftep.api.projections;
import com.cgi.eoss.ftep.api.security.FtepPermission;
import com.cgi.eoss.ftep.model.Job;
import com.cgi.eoss.ftep.model.JobStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import java.time.LocalDateTime;
/**
* <p>Default JSON projection for embedded {@link Job}s. Embeds the owner as a ShortUser.</p>
*/
@Projection(name = "shortFtepService", types = {Job.class})
public interface ShortJob extends EmbeddedId {
String getExtId();
ShortUser getOwner();
JobStatus getStatus();
String getGuiUrl();
String getStage();
LocalDateTime getStartTime();
LocalDateTime getEndTime();
@Value("#{@ftepSecurityService.getCurrentPermission(target.class, target.id)}")
FtepPermission getAccessLevel();
}
## Instruction:
Add service name to embedded Job representations
Change-Id: I7bb7e8dcbb85f563267dcd9ad823365c8a4efa9c
## Code After:
package com.cgi.eoss.ftep.api.projections;
import com.cgi.eoss.ftep.api.security.FtepPermission;
import com.cgi.eoss.ftep.model.Job;
import com.cgi.eoss.ftep.model.JobStatus;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.rest.core.config.Projection;
import java.time.LocalDateTime;
/**
* <p>Default JSON projection for embedded {@link Job}s. Embeds the owner as a ShortUser.</p>
*/
@Projection(name = "shortFtepService", types = {Job.class})
public interface ShortJob extends EmbeddedId {
String getExtId();
ShortUser getOwner();
JobStatus getStatus();
String getGuiUrl();
String getStage();
LocalDateTime getStartTime();
LocalDateTime getEndTime();
@Value("#{target.config.service.name}")
String getServiceName();
@Value("#{@ftepSecurityService.getCurrentPermission(target.class, target.id)}")
FtepPermission getAccessLevel();
}
|
# ... existing code ...
String getStage();
LocalDateTime getStartTime();
LocalDateTime getEndTime();
@Value("#{target.config.service.name}")
String getServiceName();
@Value("#{@ftepSecurityService.getCurrentPermission(target.class, target.id)}")
FtepPermission getAccessLevel();
}
# ... rest of the code ...
|
a0907ff742c81b676f602d1e17d820152f95d22e
|
django_docs/urls.py
|
django_docs/urls.py
|
from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
|
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
|
Add validation URL for Google Webmaster Tools.
|
Add validation URL for Google Webmaster Tools.
|
Python
|
bsd-3-clause
|
hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,gnarf/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,rmoorman/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,relekang/djangoproject.com,alawnchen/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,gnarf/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,nanuxbe/django,hassanabidpk/djangoproject.com,khkaminska/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,gnarf/djangoproject.com,django/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,django/djangoproject.com
|
python
|
## Code Before:
from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
## Instruction:
Add validation URL for Google Webmaster Tools.
## Code After:
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
|
...
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
...
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
...
|
21c7e73f99ab7d0a980a3719635157bb50ebc5bb
|
src/main/java/com/imaginarycode/minecraft/hubmagic/bungee/HubMagicReconnectHandler.java
|
src/main/java/com/imaginarycode/minecraft/hubmagic/bungee/HubMagicReconnectHandler.java
|
/**
* Copyright © 2014 tuxed <[email protected]>
* This work is free. You can redistribute it and/or modify it under the
* terms of the Do What The Fuck You Want To Public License, Version 2,
* as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
*/
package com.imaginarycode.minecraft.hubmagic.bungee;
import com.imaginarycode.minecraft.hubmagic.HubMagic;
import com.imaginarycode.minecraft.hubmagic.selectors.ServerSelector;
import lombok.AllArgsConstructor;
import net.md_5.bungee.api.AbstractReconnectHandler;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
@AllArgsConstructor
public class HubMagicReconnectHandler extends AbstractReconnectHandler {
private final ServerSelector selector;
@Override
protected ServerInfo getStoredServer(ProxiedPlayer player) {
return selector.selectServer(player);
}
@Override
public final void setServer(ProxiedPlayer player) {
}
@Override
public final void save() {
HubMagic.getPlugin().getPingManager().shutdown();
}
@Override
public final void close() {
}
}
|
/**
* Copyright © 2014 tuxed <[email protected]>
* This work is free. You can redistribute it and/or modify it under the
* terms of the Do What The Fuck You Want To Public License, Version 2,
* as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
*/
package com.imaginarycode.minecraft.hubmagic.bungee;
import com.imaginarycode.minecraft.hubmagic.HubMagic;
import com.imaginarycode.minecraft.hubmagic.selectors.ServerSelector;
import lombok.AllArgsConstructor;
import net.md_5.bungee.api.AbstractReconnectHandler;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
@AllArgsConstructor
public class HubMagicReconnectHandler extends AbstractReconnectHandler {
private final ServerSelector selector;
@Override
protected ServerInfo getStoredServer(ProxiedPlayer player) {
return selector.selectServer(player);
}
@Override
public final void setServer(ProxiedPlayer player) {
}
@Override
public final void save() {
}
@Override
public final void close() {
HubMagic.getPlugin().getPingManager().shutdown();
}
}
|
Fix a rather embarrassing bug where HubMagic's pinging functionality would shut down after five minutes.
|
Fix a rather embarrassing bug where HubMagic's pinging functionality would shut down after five minutes.
|
Java
|
unlicense
|
minecrafter/HubMagic
|
java
|
## Code Before:
/**
* Copyright © 2014 tuxed <[email protected]>
* This work is free. You can redistribute it and/or modify it under the
* terms of the Do What The Fuck You Want To Public License, Version 2,
* as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
*/
package com.imaginarycode.minecraft.hubmagic.bungee;
import com.imaginarycode.minecraft.hubmagic.HubMagic;
import com.imaginarycode.minecraft.hubmagic.selectors.ServerSelector;
import lombok.AllArgsConstructor;
import net.md_5.bungee.api.AbstractReconnectHandler;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
@AllArgsConstructor
public class HubMagicReconnectHandler extends AbstractReconnectHandler {
private final ServerSelector selector;
@Override
protected ServerInfo getStoredServer(ProxiedPlayer player) {
return selector.selectServer(player);
}
@Override
public final void setServer(ProxiedPlayer player) {
}
@Override
public final void save() {
HubMagic.getPlugin().getPingManager().shutdown();
}
@Override
public final void close() {
}
}
## Instruction:
Fix a rather embarrassing bug where HubMagic's pinging functionality would shut down after five minutes.
## Code After:
/**
* Copyright © 2014 tuxed <[email protected]>
* This work is free. You can redistribute it and/or modify it under the
* terms of the Do What The Fuck You Want To Public License, Version 2,
* as published by Sam Hocevar. See http://www.wtfpl.net/ for more details.
*/
package com.imaginarycode.minecraft.hubmagic.bungee;
import com.imaginarycode.minecraft.hubmagic.HubMagic;
import com.imaginarycode.minecraft.hubmagic.selectors.ServerSelector;
import lombok.AllArgsConstructor;
import net.md_5.bungee.api.AbstractReconnectHandler;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
@AllArgsConstructor
public class HubMagicReconnectHandler extends AbstractReconnectHandler {
private final ServerSelector selector;
@Override
protected ServerInfo getStoredServer(ProxiedPlayer player) {
return selector.selectServer(player);
}
@Override
public final void setServer(ProxiedPlayer player) {
}
@Override
public final void save() {
}
@Override
public final void close() {
HubMagic.getPlugin().getPingManager().shutdown();
}
}
|
// ... existing code ...
@Override
public final void save() {
}
@Override
public final void close() {
HubMagic.getPlugin().getPingManager().shutdown();
}
}
// ... rest of the code ...
|
b506b6796a8ed9e778f69ddc7718a8ea3b0f9e7a
|
flynn/__init__.py
|
flynn/__init__.py
|
import flynn.decoder
import flynn.encoder
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
def dumps(obj):
return flynn.encoder.encode_str(obj)
def dumph(obj):
return "".join(hex(n)[2:].rjust(2, "0") for n in dumps(obj))
def load(s):
return flynn.decoder.decode(s)
def loads(s):
return flynn.decoder.decode(s)
def loadh(s):
return flynn.decoder.decode(s)
|
import base64
import flynn.decoder
import flynn.encoder
__all__ = [
"decoder",
"encoder",
"dump",
"dumps",
"dumph",
"load",
"loads",
"loadh"
]
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
def dumps(obj):
return flynn.encoder.encode_str(obj)
def dumph(obj):
return base64.b16encode(dumps(obj)).decode("utf-8")
def load(s):
return flynn.decoder.decode(s)
def loads(s):
return flynn.decoder.decode(s)
def loadh(s):
return flynn.decoder.decode(s)
|
Use base64 module to convert between bytes and base16 string
|
Use base64 module to convert between bytes and base16 string
|
Python
|
mit
|
fritz0705/flynn
|
python
|
## Code Before:
import flynn.decoder
import flynn.encoder
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
def dumps(obj):
return flynn.encoder.encode_str(obj)
def dumph(obj):
return "".join(hex(n)[2:].rjust(2, "0") for n in dumps(obj))
def load(s):
return flynn.decoder.decode(s)
def loads(s):
return flynn.decoder.decode(s)
def loadh(s):
return flynn.decoder.decode(s)
## Instruction:
Use base64 module to convert between bytes and base16 string
## Code After:
import base64
import flynn.decoder
import flynn.encoder
__all__ = [
"decoder",
"encoder",
"dump",
"dumps",
"dumph",
"load",
"loads",
"loadh"
]
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
def dumps(obj):
return flynn.encoder.encode_str(obj)
def dumph(obj):
return base64.b16encode(dumps(obj)).decode("utf-8")
def load(s):
return flynn.decoder.decode(s)
def loads(s):
return flynn.decoder.decode(s)
def loadh(s):
return flynn.decoder.decode(s)
|
# ... existing code ...
import base64
import flynn.decoder
import flynn.encoder
__all__ = [
"decoder",
"encoder",
"dump",
"dumps",
"dumph",
"load",
"loads",
"loadh"
]
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
# ... modified code ...
return flynn.encoder.encode_str(obj)
def dumph(obj):
return base64.b16encode(dumps(obj)).decode("utf-8")
def load(s):
return flynn.decoder.decode(s)
# ... rest of the code ...
|
78ca9c6b8393b1b4f4bddf41febc87696796d28a
|
openpassword/openssl_utils.py
|
openpassword/openssl_utils.py
|
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hash=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hash.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
|
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hashing_function=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hashing_function.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
|
Rename hash variable to prevent colision with native method
|
Rename hash variable to prevent colision with native method
|
Python
|
mit
|
openpassword/blimey,openpassword/blimey
|
python
|
## Code Before:
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hash=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hash.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
## Instruction:
Rename hash variable to prevent colision with native method
## Code After:
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hashing_function=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hashing_function.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
|
...
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hashing_function=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hashing_function.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
...
|
2c357a54e30eecb1d7b717be3ed774dcfecc2814
|
src/stratis_cli/_actions/_stratis.py
|
src/stratis_cli/_actions/_stratis.py
|
from .._stratisd_constants import RedundancyCodes
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._data import Manager
class StratisActions():
"""
Stratis actions.
"""
@staticmethod
def list_stratisd_redundancy(namespace):
"""
List the stratisd redundancy designations.
"""
# pylint: disable=unused-argument
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(namespace):
"""
List the stratisd version.
"""
# pylint: disable=unused-argument
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
|
from .._stratisd_constants import RedundancyCodes
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._data import Manager
class StratisActions():
"""
Stratis actions.
"""
@staticmethod
def list_stratisd_redundancy(_namespace):
"""
List the stratisd redundancy designations.
"""
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(_namespace):
"""
List the stratisd version.
"""
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
|
Use '_' prefix instead of disabling pylint unused-argument lint
|
Use '_' prefix instead of disabling pylint unused-argument lint
It is more precise to mark the unused parameters this way.
Signed-off-by: mulhern <[email protected]>
|
Python
|
apache-2.0
|
stratis-storage/stratis-cli,stratis-storage/stratis-cli
|
python
|
## Code Before:
from .._stratisd_constants import RedundancyCodes
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._data import Manager
class StratisActions():
"""
Stratis actions.
"""
@staticmethod
def list_stratisd_redundancy(namespace):
"""
List the stratisd redundancy designations.
"""
# pylint: disable=unused-argument
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(namespace):
"""
List the stratisd version.
"""
# pylint: disable=unused-argument
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
## Instruction:
Use '_' prefix instead of disabling pylint unused-argument lint
It is more precise to mark the unused parameters this way.
Signed-off-by: mulhern <[email protected]>
## Code After:
from .._stratisd_constants import RedundancyCodes
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._data import Manager
class StratisActions():
"""
Stratis actions.
"""
@staticmethod
def list_stratisd_redundancy(_namespace):
"""
List the stratisd redundancy designations.
"""
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(_namespace):
"""
List the stratisd version.
"""
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
|
# ... existing code ...
"""
@staticmethod
def list_stratisd_redundancy(_namespace):
"""
List the stratisd redundancy designations.
"""
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
def list_stratisd_version(_namespace):
"""
List the stratisd version.
"""
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
# ... rest of the code ...
|
48e15b8f99bb0714b7ec465a0131e452c67004e5
|
Chapter4_TheGreatestTheoremNeverTold/top_pic_comments.py
|
Chapter4_TheGreatestTheoremNeverTold/top_pic_comments.py
|
import sys
import numpy as np
from IPython.core.display import Image
import praw
reddit = praw.Reddit("BayesianMethodsForHackers")
subreddit = reddit.get_subreddit( "pics" )
top_submissions = subreddit.get_top()
n_pic = int( sys.argv[1] ) if sys.argv[1] else 1
i = 0
while i < n_pic:
top_submission = top_submissions.next()
while "i.imgur.com" not in top_submission.url:
#make sure it is linking to an image, not a webpage.
top_submission = top_submissions.next()
i+=1
print "Title of submission: \n", top_submission.title
top_post_url = top_submission.url
#top_submission.replace_more_comments(limit=5, threshold=0)
print top_post_url
upvotes = []
downvotes = []
contents = []
_all_comments = top_submission.comments
all_comments=[]
for comment in _all_comments:
try:
upvotes.append( comment.ups )
downvotes.append( comment.downs )
contents.append( comment.body )
except Exception as e:
continue
votes = np.array( [ upvotes, downvotes] ).T
|
# Python 2 script: fetch the n-th top image submission from /r/pics via
# PRAW and collect vote counts and bodies of its comments.
import sys
import numpy as np
from IPython.core.display import Image
import praw
reddit = praw.Reddit("BayesianMethodsForHackers")
subreddit = reddit.get_subreddit( "pics" )
top_submissions = subreddit.get_top()
# Number of pictures to skip past; defaults to 1 when no CLI argument
# is supplied.
n_pic = int( sys.argv[1] ) if len(sys.argv) > 1 else 1
i = 0
# Advance through the top submissions, keeping only direct image links.
while i < n_pic:
top_submission = top_submissions.next()
while "i.imgur.com" not in top_submission.url:
#make sure it is linking to an image, not a webpage.
top_submission = top_submissions.next()
i+=1
print "Title of submission: \n", top_submission.title
top_post_url = top_submission.url
#top_submission.replace_more_comments(limit=5, threshold=0)
print top_post_url
# Collect per-comment vote data; comment objects lacking these
# attributes (e.g. "load more" placeholders — confirm) are skipped.
upvotes = []
downvotes = []
contents = []
_all_comments = top_submission.comments
all_comments=[]
for comment in _all_comments:
try:
upvotes.append( comment.ups )
downvotes.append( comment.downs )
contents.append( comment.body )
except Exception as e:
continue
# votes: one row per comment, columns = (upvotes, downvotes).
votes = np.array( [ upvotes, downvotes] ).T
|
Fix index to list when there is no 2nd element
|
Fix index to list when there is no 2nd element
|
Python
|
mit
|
chengwliu/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ultinomics/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,Fillll/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ifduyue/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,jrmontag/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,lexual/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ViralLeadership/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,jrmontag/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,lexual/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,aitatanit/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,noelevans/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,noelevans/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,chengwliu/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,jrmontag/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,alkalait/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ViralLeadership/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ViralLeadership/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,shhong/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,lexual/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,chengwliu/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ifduyue/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,Fillll/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,alkalait/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,aitatanit/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,shhong/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ultinomics/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,aitatanit/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ifduyue/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,Fill
ll/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,alkalait/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,ultinomics/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,shhong/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers,noelevans/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers
|
python
|
## Code Before:
# Python 2 script: fetch the n-th top image submission from /r/pics via
# PRAW and collect vote counts and bodies of its comments.
import sys
import numpy as np
from IPython.core.display import Image
import praw
reddit = praw.Reddit("BayesianMethodsForHackers")
subreddit = reddit.get_subreddit( "pics" )
top_submissions = subreddit.get_top()
# Number of pictures to skip past; taken from the first CLI argument.
# NOTE(review): indexing sys.argv[1] inside the condition raises
# IndexError when no argument is supplied, so the intended default of 1
# never applies — should be len(sys.argv) > 1.
n_pic = int( sys.argv[1] ) if sys.argv[1] else 1
i = 0
# Advance through the top submissions, keeping only direct image links.
while i < n_pic:
top_submission = top_submissions.next()
while "i.imgur.com" not in top_submission.url:
#make sure it is linking to an image, not a webpage.
top_submission = top_submissions.next()
i+=1
print "Title of submission: \n", top_submission.title
top_post_url = top_submission.url
#top_submission.replace_more_comments(limit=5, threshold=0)
print top_post_url
# Collect per-comment vote data; comment objects lacking these
# attributes (e.g. "load more" placeholders — confirm) are skipped.
upvotes = []
downvotes = []
contents = []
_all_comments = top_submission.comments
all_comments=[]
for comment in _all_comments:
try:
upvotes.append( comment.ups )
downvotes.append( comment.downs )
contents.append( comment.body )
except Exception as e:
continue
# votes: one row per comment, columns = (upvotes, downvotes).
votes = np.array( [ upvotes, downvotes] ).T
## Instruction:
Fix index to list when there is no 2nd element
## Code After:
# Python 2 script: fetch the n-th top image submission from /r/pics via
# PRAW and collect vote counts and bodies of its comments.
import sys
import numpy as np
from IPython.core.display import Image
import praw
reddit = praw.Reddit("BayesianMethodsForHackers")
subreddit = reddit.get_subreddit( "pics" )
top_submissions = subreddit.get_top()
# Number of pictures to skip past; defaults to 1 when no CLI argument
# is supplied.
n_pic = int( sys.argv[1] ) if len(sys.argv) > 1 else 1
i = 0
# Advance through the top submissions, keeping only direct image links.
while i < n_pic:
top_submission = top_submissions.next()
while "i.imgur.com" not in top_submission.url:
#make sure it is linking to an image, not a webpage.
top_submission = top_submissions.next()
i+=1
print "Title of submission: \n", top_submission.title
top_post_url = top_submission.url
#top_submission.replace_more_comments(limit=5, threshold=0)
print top_post_url
# Collect per-comment vote data; comment objects lacking these
# attributes (e.g. "load more" placeholders — confirm) are skipped.
upvotes = []
downvotes = []
contents = []
_all_comments = top_submission.comments
all_comments=[]
for comment in _all_comments:
try:
upvotes.append( comment.ups )
downvotes.append( comment.downs )
contents.append( comment.body )
except Exception as e:
continue
# votes: one row per comment, columns = (upvotes, downvotes).
votes = np.array( [ upvotes, downvotes] ).T
|
...
top_submissions = subreddit.get_top()
n_pic = int( sys.argv[1] ) if len(sys.argv) > 1 else 1
i = 0
while i < n_pic:
...
contents.append( comment.body )
except Exception as e:
continue
votes = np.array( [ upvotes, downvotes] ).T
...
|
86066890322e3c3654946a49c8d1cd2e1a1c2980
|
celery/tests/test_backends/__init__.py
|
celery/tests/test_backends/__init__.py
|
# Tests for celery backend alias resolution and the backend-class cache.
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
# Each known alias must resolve to (an instance of) its backend class.
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
# Resolving an alias populates the module-level cache, and a second
# lookup returns the very same cached class.
# (Method name typo "cahe" kept as-is.)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
# Tests for celery backend alias resolution and the backend-class cache.
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
# Each known alias must resolve to (an instance of) its backend class.
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
# Resolving an alias populates the module-level cache, and a second
# lookup returns the very same cached class.
# (Method name typo "cahe" kept as-is.)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
|
Python
|
bsd-3-clause
|
WoLpH/celery,ask/celery,mitsuhiko/celery,mitsuhiko/celery,WoLpH/celery,frac/celery,cbrepo/celery,cbrepo/celery,frac/celery,ask/celery
|
python
|
## Code Before:
# Tests for celery backend alias resolution and the backend-class cache.
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.pyredis import RedisBackend
class TestBackends(unittest.TestCase):
# Each known alias must resolve to (an instance of) its backend class.
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("redis", RedisBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
# Resolving an alias populates the module-level cache, and a second
# lookup returns the very same cached class.
# (Method name typo "cahe" kept as-is.)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
## Instruction:
tests.backends: Test using DatabaseBackend instead of RedisBackend, as the latter requires the redis module to be installed.
## Code After:
# Tests for celery backend alias resolution and the backend-class cache.
import unittest2 as unittest
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
# Each known alias must resolve to (an instance of) its backend class.
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
# Resolving an alias populates the module-level cache, and a second
# lookup returns the very same cached class.
# (Method name typo "cahe" kept as-is.)
def test_get_backend_cahe(self):
backends._backend_cache = {}
backends.get_backend_cls("amqp")
self.assertIn("amqp", backends._backend_cache)
amqp_backend = backends.get_backend_cls("amqp")
self.assertIs(amqp_backend, backends._backend_cache["amqp"])
|
...
from celery import backends
from celery.backends.amqp import AMQPBackend
from celery.backends.database import DatabaseBackend
class TestBackends(unittest.TestCase):
...
def test_get_backend_aliases(self):
expects = [("amqp", AMQPBackend),
("database", DatabaseBackend)]
for expect_name, expect_cls in expects:
self.assertIsInstance(backends.get_backend_cls(expect_name)(),
expect_cls)
...
|
d9c2a7112ba239fb64ecc76ce844caed9146a5dc
|
nova/db/sqlalchemy/migrate_repo/versions/023_add_vm_mode_to_instances.py
|
nova/db/sqlalchemy/migrate_repo/versions/023_add_vm_mode_to_instances.py
|
# sqlalchemy-migrate migration 023: add a nullable 'vm_mode' string
# column to the 'instances' table.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
# Minimal stub of the instances table — only the primary key is
# declared, which is enough for create_column/drop_column below.
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
# Column to add: VARCHAR(255), nullable so existing rows remain valid.
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
# Revert the migration: drop the column added by upgrade().
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
# sqlalchemy-migrate migration 023: add a nullable 'vm_mode' string
# column to the 'instances' table.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
# autoload=True reflects the existing table schema from the bound
# engine instead of stubbing the columns out by hand.
instances = Table('instances', meta, autoload=True)
# Column to add: VARCHAR(255), nullable so existing rows remain valid.
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
# Revert the migration: drop the column added by upgrade().
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
Load table schema automatically instead of stubbing out
|
Load table schema automatically instead of stubbing out
|
Python
|
apache-2.0
|
barnsnake351/nova,NeCTAR-RC/nova,zaina/nova,NewpTone/stacklab-nova,russellb/nova,Juniper/nova,tangfeixiong/nova,JioCloud/nova,affo/nova,NewpTone/stacklab-nova,gspilio/nova,maoy/zknova,SUSE-Cloud/nova,josephsuh/extra-specs,vmturbo/nova,usc-isi/nova,russellb/nova,psiwczak/openstack,ewindisch/nova,TieWei/nova,dstroppa/openstack-smartos-nova-grizzly,cloudbau/nova,vmturbo/nova,imsplitbit/nova,rajalokan/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,vmturbo/nova,TwinkleChawla/nova,DirectXMan12/nova-hacking,CloudServer/nova,psiwczak/openstack,shail2810/nova,cloudbase/nova-virtualbox,houshengbo/nova_vmware_compute_driver,varunarya10/nova_test_latest,viggates/nova,JianyuWang/nova,tianweizhang/nova,gooddata/openstack-nova,alexandrucoman/vbox-nova-driver,silenceli/nova,rajalokan/nova,scripnichenko/nova,tianweizhang/nova,zzicewind/nova,eharney/nova,orbitfp7/nova,iuliat/nova,gooddata/openstack-nova,zhimin711/nova,isyippee/nova,ted-gould/nova,apporc/nova,Triv90/Nova,luogangyi/bcec-nova,klmitch/nova,akash1808/nova,cernops/nova,shahar-stratoscale/nova,eonpatapon/nova,phenoxim/nova,MountainWei/nova,virtualopensystems/nova,hanlind/nova,maheshp/novatest,sridevikoushik31/openstack,Stavitsky/nova,usc-isi/extra-specs,devendermishrajio/nova_test_latest,alvarolopez/nova,vladikr/nova_drafts,NoBodyCam/TftpPxeBootBareMetal,mikalstill/nova,angdraug/nova,leilihh/nova,rrader/nova-docker-plugin,alexandrucoman/vbox-nova-driver,projectcalico/calico-nova,eayunstack/nova,tudorvio/nova,isyippee/nova,yosshy/nova,mmnelemane/nova,openstack/nova,fajoy/nova,sebrandon1/nova,usc-isi/extra-specs,badock/nova,qwefi/nova,devendermishrajio/nova,LoHChina/nova,scripnichenko/nova,berrange/nova,openstack/nova,psiwczak/openstack,houshengbo/nova_vmware_compute_driver,Yusuke1987/openstack_template,Metaswitch/calico-nova,badock/nova,savi-dev/nova,maelnor/nova,ntt-sic/nova,jianghuaw/nova,watonyweng/nova,Tehsmash/nova,mgagne/nova,mahak/nova,bgxavier/nova,tangfeixiong/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,whitepages/nova
,mandeepdhami/nova,NeCTAR-RC/nova,affo/nova,ewindisch/nova,rickerc/nova_audit,eharney/nova,CloudServer/nova,Stavitsky/nova,devendermishrajio/nova_test_latest,cernops/nova,gspilio/nova,SUSE-Cloud/nova,nikesh-mahalka/nova,CCI-MOC/nova,petrutlucian94/nova,CiscoSystems/nova,hanlind/nova,maheshp/novatest,yrobla/nova,mahak/nova,cloudbase/nova,citrix-openstack-build/nova,qwefi/nova,virtualopensystems/nova,Yuriy-Leonov/nova,cloudbase/nova,blueboxgroup/nova,shootstar/novatest,josephsuh/extra-specs,houshengbo/nova_vmware_compute_driver,mgagne/nova,CEG-FYP-OpenStack/scheduler,DirectXMan12/nova-hacking,ruslanloman/nova,tealover/nova,salv-orlando/MyRepo,eneabio/nova,bigswitch/nova,BeyondTheClouds/nova,edulramirez/nova,j-carpentier/nova,joker946/nova,fajoy/nova,Triv90/Nova,sileht/deb-openstack-nova,cyx1231st/nova,iuliat/nova,sacharya/nova,NoBodyCam/TftpPxeBootBareMetal,JioCloud/nova_test_latest,raildo/nova,NoBodyCam/TftpPxeBootBareMetal,salv-orlando/MyRepo,citrix-openstack-build/nova,paulmathews/nova,j-carpentier/nova,sridevikoushik31/nova,gspilio/nova,aristanetworks/arista-ovs-nova,zhimin711/nova,josephsuh/extra-specs,fajoy/nova,usc-isi/nova,salv-orlando/MyRepo,klmitch/nova,spring-week-topos/nova-week,orbitfp7/nova,berrange/nova,KarimAllah/nova,imsplitbit/nova,akash1808/nova_test_latest,whitepages/nova,kimjaejoong/nova,saleemjaveds/https-github.com-openstack-nova,spring-week-topos/nova-week,tanglei528/nova,nikesh-mahalka/nova,Yusuke1987/openstack_template,noironetworks/nova,jeffrey4l/nova,devoid/nova,CCI-MOC/nova,ntt-sic/nova,petrutlucian94/nova_dev,dstroppa/openstack-smartos-nova-grizzly,Yuriy-Leonov/nova,fnordahl/nova,gooddata/openstack-nova,rajalokan/nova,yosshy/nova,rahulunair/nova,TieWei/nova,paulmathews/nova,phenoxim/nova,shahar-stratoscale/nova,dstroppa/openstack-smartos-nova-grizzly,yrobla/nova,klmitch/nova,openstack/nova,sileht/deb-openstack-nova,sridevikoushik31/openstack,plumgrid/plumgrid-nova,dawnpower/nova,KarimAllah/nova,adelina-t/nova,Triv90/Nova,sacharya/nova,ene
abio/nova,felixma/nova,Metaswitch/calico-nova,bclau/nova,Francis-Liu/animated-broccoli,rahulunair/nova,petrutlucian94/nova,usc-isi/extra-specs,savi-dev/nova,CiscoSystems/nova,felixma/nova,shail2810/nova,NewpTone/stacklab-nova,sileht/deb-openstack-nova,russellb/nova,cyx1231st/nova,tudorvio/nova,sridevikoushik31/nova,cernops/nova,BeyondTheClouds/nova,noironetworks/nova,maoy/zknova,sebrandon1/nova,savi-dev/nova,varunarya10/nova_test_latest,Juniper/nova,maelnor/nova,akash1808/nova,gooddata/openstack-nova,Juniper/nova,joker946/nova,eonpatapon/nova,dims/nova,LoHChina/nova,cloudbase/nova,yatinkumbhare/openstack-nova,plumgrid/plumgrid-nova,fnordahl/nova,dawnpower/nova,usc-isi/nova,KarimAllah/nova,tealover/nova,sridevikoushik31/nova,bgxavier/nova,JianyuWang/nova,akash1808/nova_test_latest,jianghuaw/nova,eayunstack/nova,alaski/nova,double12gzh/nova,watonyweng/nova,MountainWei/nova,eneabio/nova,belmiromoreira/nova,Juniper/nova,zzicewind/nova,DirectXMan12/nova-hacking,jeffrey4l/nova,ruslanloman/nova,kimjaejoong/nova,sridevikoushik31/openstack,Francis-Liu/animated-broccoli,devoid/nova,vmturbo/nova,leilihh/nova,rickerc/nova_audit,yatinkumbhare/openstack-nova,mandeepdhami/nova,TwinkleChawla/nova,petrutlucian94/nova_dev,zaina/nova,yrobla/nova,leilihh/novaha,rajalokan/nova,CEG-FYP-OpenStack/scheduler,bigswitch/nova,projectcalico/calico-nova,ted-gould/nova,mikalstill/nova,maheshp/novatest,barnsnake351/nova,raildo/nova,saleemjaveds/https-github.com-openstack-nova,mikalstill/nova,angdraug/nova,vladikr/nova_drafts,rrader/nova-docker-plugin,belmiromoreira/nova,redhat-openstack/nova,silenceli/nova,apporc/nova,dims/nova,OpenAcademy-OpenStack/nova-scheduler,mmnelemane/nova,sebrandon1/nova,sridevikoushik31/nova,JioCloud/nova,thomasem/nova,edulramirez/nova,cloudbase/nova-virtualbox,JioCloud/nova_test_latest,alaski/nova,mahak/nova,redhat-openstack/nova,cloudbau/nova,jianghuaw/nova,paulmathews/nova,shootstar/novatest,OpenAcademy-OpenStack/nova-scheduler,tanglei528/nova,rahulunair/nova,luogangyi
/bcec-nova,aristanetworks/arista-ovs-nova,jianghuaw/nova,takeshineshiro/nova,thomasem/nova,aristanetworks/arista-ovs-nova,Tehsmash/nova,hanlind/nova,alvarolopez/nova,double12gzh/nova,devendermishrajio/nova,maoy/zknova,klmitch/nova,leilihh/novaha,BeyondTheClouds/nova,blueboxgroup/nova,adelina-t/nova,bclau/nova,viggates/nova,takeshineshiro/nova
|
python
|
## Code Before:
# sqlalchemy-migrate migration 023: add a nullable 'vm_mode' string
# column to the 'instances' table.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
# Minimal stub of the instances table — only the primary key is
# declared, which is enough for create_column/drop_column below.
instances = Table('instances', meta,
Column('id', Integer(), primary_key=True, nullable=False),
)
# Column to add: VARCHAR(255), nullable so existing rows remain valid.
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
# Revert the migration: drop the column added by upgrade().
meta.bind = migrate_engine
instances.drop_column('vm_mode')
## Instruction:
Load table schema automatically instead of stubbing out
## Code After:
# sqlalchemy-migrate migration 023: add a nullable 'vm_mode' string
# column to the 'instances' table.
from sqlalchemy import Column, Integer, MetaData, String, Table
meta = MetaData()
# autoload=True reflects the existing table schema from the bound
# engine instead of stubbing the columns out by hand.
instances = Table('instances', meta, autoload=True)
# Column to add: VARCHAR(255), nullable so existing rows remain valid.
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
assert_unicode=None, unicode_error=None,
_warn_on_bytestring=False),
nullable=True)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta.bind = migrate_engine
instances.create_column(instances_vm_mode)
def downgrade(migrate_engine):
# Revert the migration: drop the column added by upgrade().
meta.bind = migrate_engine
instances.drop_column('vm_mode')
|
# ... existing code ...
meta = MetaData()
instances = Table('instances', meta, autoload=True)
instances_vm_mode = Column('vm_mode',
String(length=255, convert_unicode=False,
# ... rest of the code ...
|
18925af2a74c20e86867bce9c480b5cd710b6b32
|
openbudgets/apps/sheets/utilities.py
|
openbudgets/apps/sheets/utilities.py
|
from django.conf import settings
def is_comparable():
"""Return the default value for TemplateNode.comparable (True/False),
as configured by OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
|
from django.conf import settings
def is_node_comparable(instance):
"""Return the value for TemplateNode.comparable (True/False).

Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates: if every template the node belongs to
is a blueprint, the IN_BLUEPRINT setting applies, otherwise the
NOT_IN_BLUEPRINT setting does.

:param instance: a TemplateNode with a ``templates`` relation.
"""
# NOTE(review): this initial assignment is dead — it is always
# overwritten by one of the branches below.
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
Set comparable state of node.
|
Set comparable state of node.
|
Python
|
bsd-3-clause
|
openbudgets/openbudgets,openbudgets/openbudgets,pwalsh/openbudgets,pwalsh/openbudgets,openbudgets/openbudgets,pwalsh/openbudgets
|
python
|
## Code Before:
from django.conf import settings
def is_comparable():
"""Return the default value for TemplateNode.comparable (True/False),
as configured by OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
## Instruction:
Set comparable state of node.
## Code After:
from django.conf import settings
def is_node_comparable(instance):
"""Return the value for TemplateNode.comparable (True/False).

Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates: if every template the node belongs to
is a blueprint, the IN_BLUEPRINT setting applies, otherwise the
NOT_IN_BLUEPRINT setting does.

:param instance: a TemplateNode with a ``templates`` relation.
"""
# NOTE(review): this initial assignment is dead — it is always
# overwritten by one of the branches below.
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
# ... existing code ...
from django.conf import settings
def is_node_comparable(instance):
"""Sets the value of TemplateNode.comparable to True or False.
Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates.
"""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
# ... rest of the code ...
|
bb4cddb804e0a2e8c67fa19d72a82fc8ec3a07bf
|
context-logger/context-logger-jaxws/src/main/java/io/tracee/contextlogger/jaxws/container/TraceeClientErrorLoggingHandler.java
|
context-logger/context-logger-jaxws/src/main/java/io/tracee/contextlogger/jaxws/container/TraceeClientErrorLoggingHandler.java
|
package io.tracee.contextlogger.jaxws.container;
import io.tracee.Tracee;
import io.tracee.TraceeBackend;
import io.tracee.TraceeLogger;
import io.tracee.jaxws.container.TraceeServerHandler;
import javax.xml.ws.handler.soap.SOAPMessageContext;
/**
 * JaxWs client side handler that detects uncaught exceptions and outputs contextual information.
 */
public class TraceeClientErrorLoggingHandler extends AbstractTraceeErrorLoggingHandler {
// NOTE(review): this logger field is never used in this class —
// candidate for removal.
private final TraceeLogger traceeLogger = this.getTraceeBackend().getLoggerFactory().getLogger(
TraceeServerHandler.class);
// Package-private constructor taking an explicit backend.
TraceeClientErrorLoggingHandler(TraceeBackend traceeBackend) {
super(traceeBackend);
}
// Default constructor: delegates to the globally configured backend.
public TraceeClientErrorLoggingHandler() {
this(Tracee.getBackend());
}
@Override
protected final void handleIncoming(SOAPMessageContext context) {
// Do nothing
}
@Override
protected final void handleOutgoing(SOAPMessageContext context) {
// Stash the outgoing SOAP message in a thread local so it can be
// reported later (see AbstractTraceeErrorLoggingHandler).
storeMessageInThreadLocal(context);
}
}
|
package io.tracee.contextlogger.jaxws.container;
import io.tracee.Tracee;
import io.tracee.TraceeBackend;
import io.tracee.TraceeLogger;
import io.tracee.jaxws.container.TraceeServerHandler;
import javax.xml.ws.handler.soap.SOAPMessageContext;
/**
 * JaxWs client side handler that detects uncaught exceptions and outputs contextual information.
 */
public class TraceeClientErrorLoggingHandler extends AbstractTraceeErrorLoggingHandler {
// Package-private constructor taking an explicit backend.
TraceeClientErrorLoggingHandler(TraceeBackend traceeBackend) {
super(traceeBackend);
}
// Default constructor: delegates to the globally configured backend.
public TraceeClientErrorLoggingHandler() {
this(Tracee.getBackend());
}
@Override
protected final void handleIncoming(SOAPMessageContext context) {
// Do nothing
}
@Override
protected final void handleOutgoing(SOAPMessageContext context) {
// Stash the outgoing SOAP message in a thread local so it can be
// reported later (see AbstractTraceeErrorLoggingHandler).
storeMessageInThreadLocal(context);
}
}
|
Remove unused variables and fields
|
Remove unused variables and fields
|
Java
|
bsd-3-clause
|
Hippoom/tracee,SvenBunge/tracee,danielwegener/tracee,tracee/tracee,hypery2k/tracee
|
java
|
## Code Before:
package io.tracee.contextlogger.jaxws.container;
import io.tracee.Tracee;
import io.tracee.TraceeBackend;
import io.tracee.TraceeLogger;
import io.tracee.jaxws.container.TraceeServerHandler;
import javax.xml.ws.handler.soap.SOAPMessageContext;
/**
 * JaxWs client side handler that detects uncaught exceptions and outputs contextual information.
 */
public class TraceeClientErrorLoggingHandler extends AbstractTraceeErrorLoggingHandler {
// NOTE(review): this logger field is never used in this class —
// candidate for removal.
private final TraceeLogger traceeLogger = this.getTraceeBackend().getLoggerFactory().getLogger(
TraceeServerHandler.class);
// Package-private constructor taking an explicit backend.
TraceeClientErrorLoggingHandler(TraceeBackend traceeBackend) {
super(traceeBackend);
}
// Default constructor: delegates to the globally configured backend.
public TraceeClientErrorLoggingHandler() {
this(Tracee.getBackend());
}
@Override
protected final void handleIncoming(SOAPMessageContext context) {
// Do nothing
}
@Override
protected final void handleOutgoing(SOAPMessageContext context) {
// Stash the outgoing SOAP message in a thread local so it can be
// reported later (see AbstractTraceeErrorLoggingHandler).
storeMessageInThreadLocal(context);
}
}
## Instruction:
Remove unused variables and fields
## Code After:
package io.tracee.contextlogger.jaxws.container;
import io.tracee.Tracee;
import io.tracee.TraceeBackend;
import io.tracee.TraceeLogger;
import io.tracee.jaxws.container.TraceeServerHandler;
import javax.xml.ws.handler.soap.SOAPMessageContext;
/**
 * JaxWs client side handler that detects uncaught exceptions and outputs contextual information.
 */
public class TraceeClientErrorLoggingHandler extends AbstractTraceeErrorLoggingHandler {
// Package-private constructor taking an explicit backend.
TraceeClientErrorLoggingHandler(TraceeBackend traceeBackend) {
super(traceeBackend);
}
// Default constructor: delegates to the globally configured backend.
public TraceeClientErrorLoggingHandler() {
this(Tracee.getBackend());
}
@Override
protected final void handleIncoming(SOAPMessageContext context) {
// Do nothing
}
@Override
protected final void handleOutgoing(SOAPMessageContext context) {
// Stash the outgoing SOAP message in a thread local so it can be
// reported later (see AbstractTraceeErrorLoggingHandler).
storeMessageInThreadLocal(context);
}
}
|
# ... existing code ...
*/
public class TraceeClientErrorLoggingHandler extends AbstractTraceeErrorLoggingHandler {
TraceeClientErrorLoggingHandler(TraceeBackend traceeBackend) {
super(traceeBackend);
}
# ... modified code ...
public TraceeClientErrorLoggingHandler() {
this(Tracee.getBackend());
}
@Override
protected final void handleIncoming(SOAPMessageContext context) {
# ... rest of the code ...
|
f5e8bfaf5c4f7a2131fbe0ffd0f8d14a316b907e
|
camoco/Exceptions.py
|
camoco/Exceptions.py
|
# Base class for all Camoco exceptions.
class CamocoError(Exception):
pass
# Raised when creating a Camoco object whose name/type pair already exists.
class CamocoExistsError(CamocoError):
'''
You tried to create a camoco object which already exists
under the same name,type combination.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'You are trying to create a Camoco based object'
'That already exists' + message.format(*args)
)
# Raised on duplicate gene names.
class CamocoGeneNameError(CamocoError):
'''
Gene names must be beautiful snowflakes.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
# NOTE(review): message.format(args) passes the whole tuple as one
# positional argument; *args was probably intended — confirm.
self.message = 'Gene names must be unique:' + message.format(args)
# Raised on duplicate accession names.
class CamocoAccessionNameError(CamocoError):
'''
Accession names must be Unique.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'Accession names must be unique:' + message.format(args)
)
# Raised when an operation needs a nonzero window but window is 0.
class CamocoZeroWindowError(CamocoError):
def __init__(self,expr,message,*args):
self.expr = expr
self.message = (
'Operation requiring window, but window is 0:' + \
message.format(args)
)
|
# Base class for all Camoco exceptions.
class CamocoError(Exception):
pass
# Raised when creating a Camoco object whose name/type pair already exists.
class CamocoExistsError(CamocoError):
'''
You tried to create a camoco object which already exists
under the same name,type combination.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'You are trying to create a Camoco based object'
'That already exists' + message.format(*args)
)
# Raised on duplicate gene names.
class CamocoGeneNameError(CamocoError):
'''
Gene names must be beautiful snowflakes.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
# NOTE(review): message.format(args) passes the whole tuple as one
# positional argument; *args was probably intended — confirm.
self.message = 'Gene names must be unique:' + message.format(args)
# Raised on duplicate accession names.
class CamocoAccessionNameError(CamocoError):
'''
Accession names must be Unique.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'Accession names must be unique:' + message.format(args)
)
# Raised when an operation needs a nonzero window but window is 0.
class CamocoZeroWindowError(CamocoError):
def __init__(self,expr,message,*args):
self.expr = expr
self.message = (
'Operation requiring window, but window is 0:' + \
message.format(args)
)
# Raised by the CLI to signal an interactive ipython session (control
# flow, not an error).
class CamocoInteractive(CamocoError):
def __init__(self,expr=None,message='',*args):
self.expr = expr
# NOTE(review): message and *args are accepted but ignored — confirm.
self.message = 'Camoco interactive ipython session.'
|
Add exception for cli command line to run interactively.
|
Add exception for cli command line to run interactively.
|
Python
|
mit
|
schae234/Camoco,schae234/Camoco
|
python
|
## Code Before:
# Base class for all Camoco exceptions.
class CamocoError(Exception):
pass
# Raised when creating a Camoco object whose name/type pair already exists.
class CamocoExistsError(CamocoError):
'''
You tried to create a camoco object which already exists
under the same name,type combination.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'You are trying to create a Camoco based object'
'That already exists' + message.format(*args)
)
# Raised on duplicate gene names.
class CamocoGeneNameError(CamocoError):
'''
Gene names must be beautiful snowflakes.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
# NOTE(review): message.format(args) passes the whole tuple as one
# positional argument; *args was probably intended — confirm.
self.message = 'Gene names must be unique:' + message.format(args)
# Raised on duplicate accession names.
class CamocoAccessionNameError(CamocoError):
'''
Accession names must be Unique.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'Accession names must be unique:' + message.format(args)
)
# Raised when an operation needs a nonzero window but window is 0.
class CamocoZeroWindowError(CamocoError):
def __init__(self,expr,message,*args):
self.expr = expr
self.message = (
'Operation requiring window, but window is 0:' + \
message.format(args)
)
## Instruction:
Add exception for cli command line to run interactively.
## Code After:
# Base class for all Camoco exceptions.
class CamocoError(Exception):
pass
# Raised when creating a Camoco object whose name/type pair already exists.
class CamocoExistsError(CamocoError):
'''
You tried to create a camoco object which already exists
under the same name,type combination.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'You are trying to create a Camoco based object'
'That already exists' + message.format(*args)
)
# Raised on duplicate gene names.
class CamocoGeneNameError(CamocoError):
'''
Gene names must be beautiful snowflakes.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
# NOTE(review): message.format(args) passes the whole tuple as one
# positional argument; *args was probably intended — confirm.
self.message = 'Gene names must be unique:' + message.format(args)
# Raised on duplicate accession names.
class CamocoAccessionNameError(CamocoError):
'''
Accession names must be Unique.
'''
def __init__(self,expr,message='',*args):
self.expr = expr
self.message = (
'Accession names must be unique:' + message.format(args)
)
# Raised when an operation needs a nonzero window but window is 0.
class CamocoZeroWindowError(CamocoError):
def __init__(self,expr,message,*args):
self.expr = expr
self.message = (
'Operation requiring window, but window is 0:' + \
message.format(args)
)
# Raised by the CLI to signal an interactive ipython session (control
# flow, not an error).
class CamocoInteractive(CamocoError):
def __init__(self,expr=None,message='',*args):
self.expr = expr
# NOTE(review): message and *args are accepted but ignored — confirm.
self.message = 'Camoco interactive ipython session.'
|
# ... existing code ...
'Operation requiring window, but window is 0:' + \
message.format(args)
)
class CamocoInteractive(CamocoError):
def __init__(self,expr=None,message='',*args):
self.expr = expr
self.message = 'Camoco interactive ipython session.'
# ... rest of the code ...
|
021225cbce30b70c350133f5ae3cae9409bdd6ae
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
dbaas/dbaas_services/analyzing/admin/analyze.py
|
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
# Django admin configuration for analyze-repository records.
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
# Form and service wiring required by django_services.
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
# Columns shown in the admin changelist.
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
# Django admin configuration for analyze-repository records.
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
# Form and service wiring required by django_services.
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
# Fields reachable from the admin search box.
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
# Sidebar filters on the changelist.
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
# Columns shown in the admin changelist.
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
Add filters to analyzing admin
|
Add filters to analyzing admin
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
python
|
## Code Before:
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
## Instruction:
Add filters to analyzing admin
## Code After:
from __future__ import absolute_import, unicode_literals
from django_services import admin
from dbaas_services.analyzing.service import AnalyzeRepositoryService
from dbaas_services.analyzing.forms import AnalyzeRepositoryForm
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
|
...
class AnalyzeRepositoryAdmin(admin.DjangoServicesAdmin):
form = AnalyzeRepositoryForm
service_class = AnalyzeRepositoryService
search_fields = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name",)
list_filter = ("analyzed_at", "memory_alarm", "cpu_alarm")
list_display = ("analyzed_at", "database_name", "engine_name",
"environment_name", "instance_name", "cpu_alarm",
"memory_alarm")
...
|
07999d1f24acbbfde50fe94897054e7c8df7fea1
|
api/jsonstore.py
|
api/jsonstore.py
|
import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return tf
if __name__ == "__main__":
print(store('{}'))
|
import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return os.path.split(tf)[1]
if __name__ == "__main__":
print(store('{}'))
|
Tweak JSON api return value to be friendlier
|
Tweak JSON api return value to be friendlier
|
Python
|
mit
|
controversial/wikipedia-map,controversial/wikipedia-map,controversial/wikipedia-map
|
python
|
## Code Before:
import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return tf
if __name__ == "__main__":
print(store('{}'))
## Instruction:
Tweak JSON api return value to be friendlier
## Code After:
import json
import os
import tempfile
def store(data, directory="/var/www/luke/wikipedia/graphs/"):
try:
json.loads(data)
except ValueError:
return "not-json"
tf = tempfile.mkstemp(prefix="", dir=directory)[1]
with open(tf, "w") as f:
f.write(data)
return os.path.split(tf)[1]
if __name__ == "__main__":
print(store('{}'))
|
...
with open(tf, "w") as f:
f.write(data)
return os.path.split(tf)[1]
if __name__ == "__main__":
print(store('{}'))
...
|
c6c06ab8197bfe3f007bab231536656abfcf0954
|
docs/conf.py
|
docs/conf.py
|
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
Add mock for shapely module
|
Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.
|
Python
|
apache-2.0
|
mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea
|
python
|
## Code Before:
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
## Instruction:
Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.
## Code After:
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
...
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
...
|
e20f0e1fd9aee7f665dad8e5c51fa444c5972480
|
src/main/java/net/rubygrapefruit/platform/prompts/YesNoListener.java
|
src/main/java/net/rubygrapefruit/platform/prompts/YesNoListener.java
|
package net.rubygrapefruit.platform.prompts;
class YesNoListener extends AbstractListener {
private final boolean defaultValue;
private Boolean selected;
private boolean finished;
YesNoListener(boolean defaultValue) {
this.defaultValue = defaultValue;
}
public boolean isFinished() {
return finished;
}
public Boolean getSelected() {
return selected;
}
@Override
public void character(char ch) {
if (ch == 'y') {
selected = true;
finished = true;
} else if (ch == 'n') {
selected = false;
finished = true;
}
}
@Override
public void controlKey(Key key) {
if (key == Key.Enter) {
selected = defaultValue;
finished = true;
}
}
@Override
public void endInput() {
finished = true;
}
}
|
package net.rubygrapefruit.platform.prompts;
class YesNoListener extends AbstractListener {
private final boolean defaultValue;
private Boolean selected;
private boolean finished;
YesNoListener(boolean defaultValue) {
this.defaultValue = defaultValue;
}
public boolean isFinished() {
return finished;
}
public Boolean getSelected() {
return selected;
}
@Override
public void character(char ch) {
if (ch == 'y' || ch == 'Y') {
selected = true;
finished = true;
} else if (ch == 'n' || ch == 'N') {
selected = false;
finished = true;
}
}
@Override
public void controlKey(Key key) {
if (key == Key.Enter) {
selected = defaultValue;
finished = true;
}
}
@Override
public void endInput() {
finished = true;
}
}
|
Allow 'Y' and 'N' (upper case) as a response for a yes/no question.
|
Allow 'Y' and 'N' (upper case) as a response for a yes/no question.
|
Java
|
apache-2.0
|
adammurdoch/native-platform,adammurdoch/native-platform,adammurdoch/native-platform,adammurdoch/native-platform
|
java
|
## Code Before:
package net.rubygrapefruit.platform.prompts;
class YesNoListener extends AbstractListener {
private final boolean defaultValue;
private Boolean selected;
private boolean finished;
YesNoListener(boolean defaultValue) {
this.defaultValue = defaultValue;
}
public boolean isFinished() {
return finished;
}
public Boolean getSelected() {
return selected;
}
@Override
public void character(char ch) {
if (ch == 'y') {
selected = true;
finished = true;
} else if (ch == 'n') {
selected = false;
finished = true;
}
}
@Override
public void controlKey(Key key) {
if (key == Key.Enter) {
selected = defaultValue;
finished = true;
}
}
@Override
public void endInput() {
finished = true;
}
}
## Instruction:
Allow 'Y' and 'N' (upper case) as a response for a yes/no question.
## Code After:
package net.rubygrapefruit.platform.prompts;
class YesNoListener extends AbstractListener {
private final boolean defaultValue;
private Boolean selected;
private boolean finished;
YesNoListener(boolean defaultValue) {
this.defaultValue = defaultValue;
}
public boolean isFinished() {
return finished;
}
public Boolean getSelected() {
return selected;
}
@Override
public void character(char ch) {
if (ch == 'y' || ch == 'Y') {
selected = true;
finished = true;
} else if (ch == 'n' || ch == 'N') {
selected = false;
finished = true;
}
}
@Override
public void controlKey(Key key) {
if (key == Key.Enter) {
selected = defaultValue;
finished = true;
}
}
@Override
public void endInput() {
finished = true;
}
}
|
...
@Override
public void character(char ch) {
if (ch == 'y' || ch == 'Y') {
selected = true;
finished = true;
} else if (ch == 'n' || ch == 'N') {
selected = false;
finished = true;
}
...
|
fac280a022c8728f14bbe1194cf74af761b7ec3f
|
vfp2py/__main__.py
|
vfp2py/__main__.py
|
import argparse
import vfp2py
def parse_args(argv=None):
parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
parser.add_argument("infile", help="file to convert", type=str)
parser.add_argument("outfile", help="file to output to", type=str)
parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
global SEARCH_PATH
SEARCH_PATH = args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
import argparse
import vfp2py
def parse_args(argv=None):
parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
parser.add_argument("infile", help="file to convert", type=str)
parser.add_argument("outfile", help="file to output to", type=str)
parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
vfp2py.SEARCH_PATH += args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
Fix search paths not being added from arguments.
|
Fix search paths not being added from arguments.
|
Python
|
mit
|
mwisslead/vfp2py,mwisslead/vfp2py
|
python
|
## Code Before:
import argparse
import vfp2py
def parse_args(argv=None):
parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
parser.add_argument("infile", help="file to convert", type=str)
parser.add_argument("outfile", help="file to output to", type=str)
parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
global SEARCH_PATH
SEARCH_PATH = args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
## Instruction:
Fix search paths not being added from arguments.
## Code After:
import argparse
import vfp2py
def parse_args(argv=None):
parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
parser.add_argument("infile", help="file to convert", type=str)
parser.add_argument("outfile", help="file to output to", type=str)
parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
vfp2py.SEARCH_PATH += args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
...
def main(argv=None):
args = parse_args(argv)
vfp2py.SEARCH_PATH += args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
...
|
76a2248ffe8c64b15a6f7d307b6d7c726e97165c
|
alerts/cloudtrail_logging_disabled.py
|
alerts/cloudtrail_logging_disabled.py
|
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
Send Cloudtrail logging disabled alert to MOC
|
Send Cloudtrail logging disabled alert to MOC
|
Python
|
mpl-2.0
|
mozilla/MozDef,Phrozyn/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,ameihm0912/MozDef,mpurzynski/MozDef,mozilla/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,ameihm0912/MozDef,mozilla/MozDef,ameihm0912/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,mozilla/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,Phrozyn/MozDef
|
python
|
## Code Before:
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
## Instruction:
Send Cloudtrail logging disabled alert to MOC
## Code After:
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
...
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
...
|
fe32099bf1b6aa387c98dd6afdfc31557fc4e1f9
|
volpy/__init__.py
|
volpy/__init__.py
|
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
'''
Volpy
=====
A fast volume rendering implementation for Python. Volpy has support for:
1. Multithreading or multiprocessing at the rendering step
2. Native implementation of ray casting
3. Native access to NumPy arrays during rendering
4. Support for ambient and diffuse lighting terms
How to use this package
-----------------------
Volpy is organized into several different modules but the API is imported into
the root of the package. Therefore, you should write your code like this:
>>> import volpy
>>> scene = volpy.Scene(ambient=my_func)
'''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
Write a docstring for the package
|
Write a docstring for the package
|
Python
|
mit
|
OEP/volpy,OEP/volpy
|
python
|
## Code Before:
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
## Instruction:
Write a docstring for the package
## Code After:
'''
Volpy
=====
A fast volume rendering implementation for Python. Volpy has support for:
1. Multithreading or multiprocessing at the rendering step
2. Native implementation of ray casting
3. Native access to NumPy arrays during rendering
4. Support for ambient and diffuse lighting terms
How to use this package
-----------------------
Volpy is organized into several different modules but the API is imported into
the root of the package. Therefore, you should write your code like this:
>>> import volpy
>>> scene = volpy.Scene(ambient=my_func)
'''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
from .grid import Grid
from .homogeneous import (translate, scale, rotatex, rotatey, rotatez, rotatexyz,
rotate_axis, cross)
from .geometry import Geometry, BBox
|
// ... existing code ...
'''
Volpy
=====
A fast volume rendering implementation for Python. Volpy has support for:
1. Multithreading or multiprocessing at the rendering step
2. Native implementation of ray casting
3. Native access to NumPy arrays during rendering
4. Support for ambient and diffuse lighting terms
How to use this package
-----------------------
Volpy is organized into several different modules but the API is imported into
the root of the package. Therefore, you should write your code like this:
>>> import volpy
>>> scene = volpy.Scene(ambient=my_func)
'''
from .camera import Camera
from .scene import Scene, Element, Light
from .version import __version__
// ... rest of the code ...
|
c656bca77d13afcfd8d6df0286b632433e08def1
|
setup.py
|
setup.py
|
import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='[email protected]',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
|
import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='[email protected]',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
|
Add subdirs of nativeconfig package to build.
|
Add subdirs of nativeconfig package to build.
|
Python
|
mit
|
GreatFruitOmsk/nativeconfig
|
python
|
## Code Before:
import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='[email protected]',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
## Instruction:
Add subdirs of nativeconfig package to build.
## Code After:
import os
from setuptools import setup
from sys import platform
REQUIREMENTS = []
if platform.startswith('darwin'):
REQUIREMENTS.append('pyobjc-core >= 2.5')
with open(os.path.join(os.path.dirname(__file__), 'nativeconfig', 'version.py')) as f:
version = None
code = compile(f.read(), 'version.py', 'exec')
exec(code)
assert version
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
author_email='[email protected]',
description="Cross-platform python module to store application config via native subsystems such as Windows Registry or NSUserDefaults.",
platforms=["Mac OS X 10.6+", "Windows XP+", "Linux 2.6+"],
keywords='config',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=REQUIREMENTS,
test_suite='test'
)
|
// ... existing code ...
setup(
name='nativeconfig',
version=version,
packages=['nativeconfig', 'nativeconfig.options', 'nativeconfig.config'],
url='https://github.com/GreatFruitOmsk/nativeconfig',
license='MIT License',
author='Ilya Kulakov',
// ... rest of the code ...
|
877954c7af728f76fabf3078853edffbe80239f7
|
src/main/Main.java
|
src/main/Main.java
|
package main;
import approximation.Lagrange;
/**
* Main class
* @author Pavel_Verkhovtsov
*/
public class Main {
private static double[] y = {2.02, 1.98, 1.67, 1.65, 1.57, 1.42, 1.37, 1.07, 0.85, 0.48, 0.35, -0.30, -0.61, -1.2, -1.39, -1.76, -2.28, -2.81, -3.57, -4.06};
private static double[] x = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
/**
* Main method.
* @param args console parameters
*/
public static void main(final String[] args){
Lagrange lagrange = new Lagrange(x, y);
System.out.println(lagrange.approximazeFunction(2));
}
}
|
package main;
import java.util.Scanner;
import approximation.Lagrange;
/**
* Main class
* @author Pavel_Verkhovtsov
*/
public class Main {
private static double[] y = {2.02, 1.98, 1.67, 1.65, 1.57, 1.42, 1.37, 1.07, 0.85, 0.48, 0.35, -0.30, -0.61, -1.2, -1.39, -1.76, -2.28, -2.81, -3.57, -4.06};
private static double[] x = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
/**
* Main method.
* @param args console parameters
*/
@SuppressWarnings("resource")
public static void main(final String[] args){
Lagrange lagrange = new Lagrange(x, y);
System.out.print("Input approximation point: ");
double point = new Scanner(System.in).nextDouble();
System.out.println(lagrange.approximazeFunction(point));
}
}
|
Add console input approximation point
|
Add console input approximation point
|
Java
|
mit
|
VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs
|
java
|
## Code Before:
package main;
import approximation.Lagrange;
/**
* Main class
* @author Pavel_Verkhovtsov
*/
public class Main {
private static double[] y = {2.02, 1.98, 1.67, 1.65, 1.57, 1.42, 1.37, 1.07, 0.85, 0.48, 0.35, -0.30, -0.61, -1.2, -1.39, -1.76, -2.28, -2.81, -3.57, -4.06};
private static double[] x = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
/**
* Main method.
* @param args console parameters
*/
public static void main(final String[] args){
Lagrange lagrange = new Lagrange(x, y);
System.out.println(lagrange.approximazeFunction(2));
}
}
## Instruction:
Add console input approximation point
## Code After:
package main;
import java.util.Scanner;
import approximation.Lagrange;
/**
* Main class
* @author Pavel_Verkhovtsov
*/
public class Main {
private static double[] y = {2.02, 1.98, 1.67, 1.65, 1.57, 1.42, 1.37, 1.07, 0.85, 0.48, 0.35, -0.30, -0.61, -1.2, -1.39, -1.76, -2.28, -2.81, -3.57, -4.06};
private static double[] x = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
/**
* Main method.
* @param args console parameters
*/
@SuppressWarnings("resource")
public static void main(final String[] args){
Lagrange lagrange = new Lagrange(x, y);
System.out.print("Input approximation point: ");
double point = new Scanner(System.in).nextDouble();
System.out.println(lagrange.approximazeFunction(point));
}
}
|
# ... existing code ...
package main;
import java.util.Scanner;
import approximation.Lagrange;
# ... modified code ...
* Main method.
* @param args console parameters
*/
@SuppressWarnings("resource")
public static void main(final String[] args){
Lagrange lagrange = new Lagrange(x, y);
System.out.print("Input approximation point: ");
double point = new Scanner(System.in).nextDouble();
System.out.println(lagrange.approximazeFunction(point));
}
}
# ... rest of the code ...
|
a1d591b16a661449fddccfaa94994f25218a1918
|
webbridge/src/main/java/plugins/webbridge/api/WebBridgeListener.java
|
webbridge/src/main/java/plugins/webbridge/api/WebBridgeListener.java
|
package plugins.webbridge.api;
import logbook.api.APIListenerSpi;
import logbook.proxy.RequestMetaData;
import logbook.proxy.ResponseMetaData;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import plugins.webbridge.bean.WebBridgeConfig;
import javax.json.Json;
import javax.json.JsonObject;
import java.util.Date;
public class WebBridgeListener implements APIListenerSpi {
private static final MediaType JSON = MediaType.get("application/json; charset=utf-8");
private OkHttpClient client = new OkHttpClient();
@Override
public void accept(JsonObject jsonObject, RequestMetaData requestMetaData, ResponseMetaData responseMetaData) {
WebBridgeConfig config = WebBridgeConfig.get();
if (!config.isBridgeEnabled()) {
return;
}
RequestBody body = RequestBody.create(Json.createObjectBuilder()
.add("uri", requestMetaData.getRequestURI())
.add("date", new Date().getTime())
.add("body", jsonObject)
.build().toString(), JSON);
String url = "http://" + config.getBridgeHost() + ":" + config.getBridgePort() + "/pub";
Request request = new Request.Builder()
.url(url)
.post(body)
.build();
try {
this.client.newCall(request).execute();
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
package plugins.webbridge.api;
import logbook.api.APIListenerSpi;
import logbook.proxy.RequestMetaData;
import logbook.proxy.ResponseMetaData;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import plugins.webbridge.bean.WebBridgeConfig;
import javax.json.Json;
import javax.json.JsonObject;
import java.util.Date;
public class WebBridgeListener implements APIListenerSpi {
private static final MediaType JSON = MediaType.get("application/json; charset=utf-8");
private OkHttpClient client = new OkHttpClient();
@Override
public void accept(JsonObject jsonObject, RequestMetaData requestMetaData, ResponseMetaData responseMetaData) {
WebBridgeConfig config = WebBridgeConfig.get();
if (!config.isBridgeEnabled()) {
return;
}
String url = "http://" + config.getBridgeHost() + ":" + config.getBridgePort() + "/pub";
RequestBody body = RequestBody.create(Json.createObjectBuilder()
.add("uri", requestMetaData.getRequestURI())
.add("time", new Date().getTime())
.add("body", jsonObject)
.build().toString(), JSON);
Request request = new Request.Builder()
.url(url)
.post(body)
.build();
try {
this.client.newCall(request).execute();
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
Rename the WebBridge payload property: date -> time
|
Rename the WebBridge payload property: date -> time
|
Java
|
mit
|
rsky/logbook-kai-plugins
|
java
|
## Code Before:
package plugins.webbridge.api;
import logbook.api.APIListenerSpi;
import logbook.proxy.RequestMetaData;
import logbook.proxy.ResponseMetaData;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import plugins.webbridge.bean.WebBridgeConfig;
import javax.json.Json;
import javax.json.JsonObject;
import java.util.Date;
public class WebBridgeListener implements APIListenerSpi {
private static final MediaType JSON = MediaType.get("application/json; charset=utf-8");
private OkHttpClient client = new OkHttpClient();
@Override
public void accept(JsonObject jsonObject, RequestMetaData requestMetaData, ResponseMetaData responseMetaData) {
WebBridgeConfig config = WebBridgeConfig.get();
if (!config.isBridgeEnabled()) {
return;
}
RequestBody body = RequestBody.create(Json.createObjectBuilder()
.add("uri", requestMetaData.getRequestURI())
.add("date", new Date().getTime())
.add("body", jsonObject)
.build().toString(), JSON);
String url = "http://" + config.getBridgeHost() + ":" + config.getBridgePort() + "/pub";
Request request = new Request.Builder()
.url(url)
.post(body)
.build();
try {
this.client.newCall(request).execute();
} catch (Exception e) {
e.printStackTrace();
}
}
}
## Instruction:
Rename the WebBridge payload property: date -> time
## Code After:
package plugins.webbridge.api;
import logbook.api.APIListenerSpi;
import logbook.proxy.RequestMetaData;
import logbook.proxy.ResponseMetaData;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import plugins.webbridge.bean.WebBridgeConfig;
import javax.json.Json;
import javax.json.JsonObject;
import java.util.Date;
public class WebBridgeListener implements APIListenerSpi {
private static final MediaType JSON = MediaType.get("application/json; charset=utf-8");
private OkHttpClient client = new OkHttpClient();
@Override
public void accept(JsonObject jsonObject, RequestMetaData requestMetaData, ResponseMetaData responseMetaData) {
WebBridgeConfig config = WebBridgeConfig.get();
if (!config.isBridgeEnabled()) {
return;
}
String url = "http://" + config.getBridgeHost() + ":" + config.getBridgePort() + "/pub";
RequestBody body = RequestBody.create(Json.createObjectBuilder()
.add("uri", requestMetaData.getRequestURI())
.add("time", new Date().getTime())
.add("body", jsonObject)
.build().toString(), JSON);
Request request = new Request.Builder()
.url(url)
.post(body)
.build();
try {
this.client.newCall(request).execute();
} catch (Exception e) {
e.printStackTrace();
}
}
}
|
// ... existing code ...
return;
}
String url = "http://" + config.getBridgeHost() + ":" + config.getBridgePort() + "/pub";
RequestBody body = RequestBody.create(Json.createObjectBuilder()
.add("uri", requestMetaData.getRequestURI())
.add("time", new Date().getTime())
.add("body", jsonObject)
.build().toString(), JSON);
Request request = new Request.Builder()
.url(url)
.post(body)
// ... rest of the code ...
|
2f34d442157f86af4fd75c48ea2cf568fbef34f6
|
migrations/versions/223041bb858b_message_contact_association.py
|
migrations/versions/223041bb858b_message_contact_association.py
|
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
# Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a
# script to process contacts from messages, so it's very expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
# Yes, this is a terrible hack. But tools/rerank_contacts.py already
# contains a script to process contacts from messages, so it's very
# expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
Rearrange imports in previous migration.
|
Rearrange imports in previous migration.
According to mg bad things can happen if you try to do stuff outside of a
migration's upgrade() function.
|
Python
|
agpl-3.0
|
wakermahmud/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,rmasters/inbox,jobscore/sync-engine,closeio/nylas,Eagles2F/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,closeio/nylas,Eagles2F/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,rmasters/inbox,PriviPK/privipk-sync-engine,gale320/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,rmasters/inbox,nylas/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,rmasters/inbox,Eagles2F/sync-engine,ErinCall/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,nylas/sync-engine,jobscore/sync-engine,gale320/sync-engine
|
python
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
# Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a
# script to process contacts from messages, so it's very expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
## Instruction:
Rearrange imports in previous migration.
According to mg bad things can happen if you try to do stuff outside of a
migration's upgrade() function.
## Code After:
# revision identifiers, used by Alembic.
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'messagecontactassociation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('message_id', sa.Integer(), nullable=False),
sa.Column('field',
sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'),
nullable=True),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
# Yes, this is a terrible hack. But tools/rerank_contacts.py already
# contains a script to process contacts from messages, so it's very
# expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
rerank_contacts()
def downgrade():
op.drop_table('messagecontactassociation')
|
# ... existing code ...
revision = '223041bb858b'
down_revision = '2c9f3a06de09'
from alembic import op
import sqlalchemy as sa
# ... modified code ...
sa.ForeignKeyConstraint(['message_id'], ['message.id'], ),
sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id')
)
# Yes, this is a terrible hack. But tools/rerank_contacts.py already
# contains a script to process contacts from messages, so it's very
# expedient.
import sys
sys.path.append('./tools')
from rerank_contacts import rerank_contacts
rerank_contacts()
# ... rest of the code ...
|
def6ae3782834ccbd07047ca10d7f6bf7ebde449
|
src/yarrar/Util.h
|
src/yarrar/Util.h
|
namespace yarrar {
template<typename Container, typename Value>
bool contains(const Container& c, const Value& v)
{
return std::find(std::begin(c), std::end(c), v) != std::end(c);
}
template<typename... Args>
std::string format(const std::string& format, Args... args)
{
int neededSize = snprintf(nullptr, 0, format.c_str(), args...);
// If there was an error return the original string.
if(neededSize <= 0)
return format;
neededSize += 1;
std::vector<char> buf(static_cast<size_t> (neededSize));
snprintf(&buf.front(), static_cast<size_t> (neededSize), format.c_str(), args...);
return std::string(&buf.front());
}
cv::Size getScaledDownResolution(const int width,
const int height,
const int preferredWidth);
void rotate(const cv::Mat& src, cv::Mat& dst, const yarrar::Rotation90& rotation);
json11::Json loadJson(const std::string& filePath);
}
|
namespace yarrar {
template<typename Container, typename Value>
bool contains(const Container& c, const Value& v)
{
return std::find(std::begin(c), std::end(c), v) != std::end(c);
}
template<typename... Args>
std::string format(const std::string& format, Args... args)
{
int neededSize = snprintf(nullptr, 0, format.c_str(), args...);
// If there was an error return the original string.
if(neededSize <= 0)
return format;
// Accommodate \0
neededSize += 1;
std::string buf;
buf.resize(static_cast<size_t> (neededSize));
snprintf(&buf.front(), static_cast<size_t> (neededSize), format.c_str(), args...);
return buf;
}
cv::Size getScaledDownResolution(const int width,
const int height,
const int preferredWidth);
void rotate(const cv::Mat& src, cv::Mat& dst, const yarrar::Rotation90& rotation);
json11::Json loadJson(const std::string& filePath);
}
|
Use std::string straight instead of vector<char> in format().
|
Use std::string straight instead of vector<char> in format().
|
C
|
mit
|
ndob/yarrar,ndob/yarrar,ndob/yarrar,ndob/yarrar
|
c
|
## Code Before:
namespace yarrar {
template<typename Container, typename Value>
bool contains(const Container& c, const Value& v)
{
return std::find(std::begin(c), std::end(c), v) != std::end(c);
}
template<typename... Args>
std::string format(const std::string& format, Args... args)
{
int neededSize = snprintf(nullptr, 0, format.c_str(), args...);
// If there was an error return the original string.
if(neededSize <= 0)
return format;
neededSize += 1;
std::vector<char> buf(static_cast<size_t> (neededSize));
snprintf(&buf.front(), static_cast<size_t> (neededSize), format.c_str(), args...);
return std::string(&buf.front());
}
cv::Size getScaledDownResolution(const int width,
const int height,
const int preferredWidth);
void rotate(const cv::Mat& src, cv::Mat& dst, const yarrar::Rotation90& rotation);
json11::Json loadJson(const std::string& filePath);
}
## Instruction:
Use std::string straight instead of vector<char> in format().
## Code After:
namespace yarrar {
template<typename Container, typename Value>
bool contains(const Container& c, const Value& v)
{
return std::find(std::begin(c), std::end(c), v) != std::end(c);
}
template<typename... Args>
std::string format(const std::string& format, Args... args)
{
int neededSize = snprintf(nullptr, 0, format.c_str(), args...);
// If there was an error return the original string.
if(neededSize <= 0)
return format;
// Accommodate \0
neededSize += 1;
std::string buf;
buf.resize(static_cast<size_t> (neededSize));
snprintf(&buf.front(), static_cast<size_t> (neededSize), format.c_str(), args...);
return buf;
}
cv::Size getScaledDownResolution(const int width,
const int height,
const int preferredWidth);
void rotate(const cv::Mat& src, cv::Mat& dst, const yarrar::Rotation90& rotation);
json11::Json loadJson(const std::string& filePath);
}
|
// ... existing code ...
if(neededSize <= 0)
return format;
// Accommodate \0
neededSize += 1;
std::string buf;
buf.resize(static_cast<size_t> (neededSize));
snprintf(&buf.front(), static_cast<size_t> (neededSize), format.c_str(), args...);
return buf;
}
cv::Size getScaledDownResolution(const int width,
// ... rest of the code ...
|
aa360309f387f19f6566d08325cd1aa1131768da
|
bulbs/utils/filters.py
|
bulbs/utils/filters.py
|
from rest_framework import filters
class CaseInsensitiveBooleanFilter(filters.BaseFilterBackend):
"""Set a boolean_fields tuple on the viewset and set this class as a
filter_backend to filter listed fields through a case-insensitive transformation
to be used for filtering. i.e. query params such as 'true' become boolean
True, and params with a value 'false' become boolean False."""
def filter_queryset(self, request, queryset, view):
boolean_fields = getattr(view, 'boolean_fields', None)
if not boolean_fields:
return queryset
boolean_filters = {}
for field in boolean_fields:
if field in request.QUERY_PARAMS:
val = request.QUERY_PARAMS[field]
if val in ['true', 'True']:
boolean_filters[field] = True
elif val in ['false', 'False']:
boolean_filters[field] = False
if len(boolean_filters) > 0:
return queryset.filter(**boolean_filters)
return queryset
|
from rest_framework import filters
class CaseInsensitiveBooleanFilter(filters.BaseFilterBackend):
"""Set a boolean_fields tuple on the viewset and set this class as a
filter_backend to filter listed fields through a case-insensitive transformation
to be used for filtering. i.e. query params such as 'true' become boolean
True, and params with a value 'false' become boolean False."""
def filter_queryset(self, request, queryset, view):
boolean_fields = getattr(view, 'boolean_fields', None)
if not boolean_fields:
return queryset
boolean_filters = {}
for field in boolean_fields:
if field in request.QUERY_PARAMS:
val = request.QUERY_PARAMS[field].lower()
if val == 'true':
boolean_filters[field] = True
elif val == 'false':
boolean_filters[field] = False
if len(boolean_filters) > 0:
return queryset.filter(**boolean_filters)
return queryset
|
Cover every case for CaseInsensitiveBooleanFilter
|
Cover every case for CaseInsensitiveBooleanFilter
|
Python
|
mit
|
pombredanne/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs
|
python
|
## Code Before:
from rest_framework import filters
class CaseInsensitiveBooleanFilter(filters.BaseFilterBackend):
"""Set a boolean_fields tuple on the viewset and set this class as a
filter_backend to filter listed fields through a case-insensitive transformation
to be used for filtering. i.e. query params such as 'true' become boolean
True, and params with a value 'false' become boolean False."""
def filter_queryset(self, request, queryset, view):
boolean_fields = getattr(view, 'boolean_fields', None)
if not boolean_fields:
return queryset
boolean_filters = {}
for field in boolean_fields:
if field in request.QUERY_PARAMS:
val = request.QUERY_PARAMS[field]
if val in ['true', 'True']:
boolean_filters[field] = True
elif val in ['false', 'False']:
boolean_filters[field] = False
if len(boolean_filters) > 0:
return queryset.filter(**boolean_filters)
return queryset
## Instruction:
Cover every case for CaseInsensitiveBooleanFilter
## Code After:
from rest_framework import filters
class CaseInsensitiveBooleanFilter(filters.BaseFilterBackend):
"""Set a boolean_fields tuple on the viewset and set this class as a
filter_backend to filter listed fields through a case-insensitive transformation
to be used for filtering. i.e. query params such as 'true' become boolean
True, and params with a value 'false' become boolean False."""
def filter_queryset(self, request, queryset, view):
boolean_fields = getattr(view, 'boolean_fields', None)
if not boolean_fields:
return queryset
boolean_filters = {}
for field in boolean_fields:
if field in request.QUERY_PARAMS:
val = request.QUERY_PARAMS[field].lower()
if val == 'true':
boolean_filters[field] = True
elif val == 'false':
boolean_filters[field] = False
if len(boolean_filters) > 0:
return queryset.filter(**boolean_filters)
return queryset
|
# ... existing code ...
boolean_filters = {}
for field in boolean_fields:
if field in request.QUERY_PARAMS:
val = request.QUERY_PARAMS[field].lower()
if val == 'true':
boolean_filters[field] = True
elif val == 'false':
boolean_filters[field] = False
if len(boolean_filters) > 0:
# ... rest of the code ...
|
1ae537b291674d864c661298ddcbdd5d0c39a0ab
|
server/src/test/java/io/crate/testing/Asserts.java
|
server/src/test/java/io/crate/testing/Asserts.java
|
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.testing;
import org.hamcrest.Matcher;
import org.junit.jupiter.api.function.Executable;
import org.opentest4j.AssertionFailedError;
public class Asserts {
private Asserts() {}
public static <T extends Throwable> void assertThrows(Executable executable, Matcher<T> matcher) {
try {
executable.execute();
} catch (Throwable t) {
if (matcher.matches(t)) {
return;
}
throw new AssertionFailedError(
String.format("Unmatched %s type thrown with message '%s'",
t.getClass().getCanonicalName(),
t.getMessage()), t);
}
throw new AssertionFailedError("Expected exception to be thrown, but nothing was thrown.");
}
}
|
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.testing;
import static org.hamcrest.MatcherAssert.assertThat;
import org.hamcrest.Matcher;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.function.Executable;
public class Asserts {
private Asserts() {}
public static void assertThrows(Executable executable, Matcher<? super Throwable> matcher) {
try {
executable.execute();
Assertions.fail("Expected exception to be thrown, but nothing was thrown.");
} catch (Throwable t) {
assertThat(t, matcher);
}
}
}
|
Use junit assertions in assertThrows
|
Use junit assertions in assertThrows
I was wondering where a opentest4j error came from.
This changes the newly introduced assertThrows to use the same
verification mechanism we use everywhere else.
|
Java
|
apache-2.0
|
crate/crate,EvilMcJerkface/crate,EvilMcJerkface/crate,crate/crate,EvilMcJerkface/crate,crate/crate
|
java
|
## Code Before:
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.testing;
import org.hamcrest.Matcher;
import org.junit.jupiter.api.function.Executable;
import org.opentest4j.AssertionFailedError;
public class Asserts {
private Asserts() {}
public static <T extends Throwable> void assertThrows(Executable executable, Matcher<T> matcher) {
try {
executable.execute();
} catch (Throwable t) {
if (matcher.matches(t)) {
return;
}
throw new AssertionFailedError(
String.format("Unmatched %s type thrown with message '%s'",
t.getClass().getCanonicalName(),
t.getMessage()), t);
}
throw new AssertionFailedError("Expected exception to be thrown, but nothing was thrown.");
}
}
## Instruction:
Use junit assertions in assertThrows
I was wondering where a opentest4j error came from.
This changes the newly introduced assertThrows to use the same
verification mechanism we use everywhere else.
## Code After:
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.testing;
import static org.hamcrest.MatcherAssert.assertThat;
import org.hamcrest.Matcher;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.function.Executable;
public class Asserts {
private Asserts() {}
public static void assertThrows(Executable executable, Matcher<? super Throwable> matcher) {
try {
executable.execute();
Assertions.fail("Expected exception to be thrown, but nothing was thrown.");
} catch (Throwable t) {
assertThat(t, matcher);
}
}
}
|
...
package io.crate.testing;
import static org.hamcrest.MatcherAssert.assertThat;
import org.hamcrest.Matcher;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.function.Executable;
public class Asserts {
private Asserts() {}
public static void assertThrows(Executable executable, Matcher<? super Throwable> matcher) {
try {
executable.execute();
Assertions.fail("Expected exception to be thrown, but nothing was thrown.");
} catch (Throwable t) {
assertThat(t, matcher);
}
}
}
...
|
9ff75ff858681665141650d4e1ef310265956f35
|
tools/workplace_status.py
|
tools/workplace_status.py
|
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
Make git describe --always return a value
|
Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.
|
Python
|
apache-2.0
|
bazelbuild/bazel-watcher,bazelbuild/bazel-watcher,bazelbuild/bazel-watcher,bazelbuild/bazel-watcher
|
python
|
## Code Before:
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
## Instruction:
Make git describe --always return a value
This means that the latest commit will be stamped if there are no tags.
## Code After:
from __future__ import print_function
from subprocess import Popen, PIPE
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
version.decode("utf-8").replace("\n", ""),
"-dirty" if dirty else "")
)
|
// ... existing code ...
dirty = Popen(["git", "diff-index", "--quiet", "HEAD"], stdout=PIPE).wait() != 0
commit_process = Popen(["git", "describe", "--always", "--tags", "--abbrev=0"], stdout=PIPE)
(version, err) = commit_process.communicate()
print("STABLE_GIT_VERSION %s%s" % (
// ... rest of the code ...
|
cacb032fc46d76632d0384fe235c37c47e899291
|
src/main/java/org/cryptonit/cloud/timestamping/Application.java
|
src/main/java/org/cryptonit/cloud/timestamping/Application.java
|
package org.cryptonit.cloud.timestamping;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.bouncycastle.tsp.TSPException;
import org.bouncycastle.tsp.TimeStampRequest;
import org.bouncycastle.tsp.TimeStampResponse;
import org.cryptonit.cloud.interfaces.TimestampingAuthority;
import org.cryptonit.cloud.interfaces.TimestampingAuthorityFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Mathias Brossard
*/
@Path("/timestamp")
public class Application {
private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);
private static TimestampingAuthorityFactory tsaFactory = null;
@POST
public Response timestamp(@Context HttpServletRequest request,
@Context HttpHeaders headers) throws IOException, TSPException {
TimeStampRequest tsq = new TimeStampRequest(request.getInputStream());
TimestampingAuthority tsa = tsaFactory.getTimestampingAuthority(request.getServerName());
TimeStampResponse tsr = tsa.timestamp(tsq);
return Response.ok(tsr.getEncoded()).build();
}
public static void setTimestampingAuthorityFactory(TimestampingAuthorityFactory factory) {
tsaFactory = factory;
}
}
|
package org.cryptonit.cloud.timestamping;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.bouncycastle.tsp.TSPException;
import org.bouncycastle.tsp.TimeStampRequest;
import org.bouncycastle.tsp.TimeStampResponse;
import org.cryptonit.cloud.interfaces.TimestampingAuthority;
import org.cryptonit.cloud.interfaces.TimestampingAuthorityFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Mathias Brossard
*/
@Path("/timestamp")
public class Application {
private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);
private static TimestampingAuthorityFactory tsaFactory = null;
@POST
@Consumes("application/timestamp-query")
@Produces("application/timestamp-reply")
public Response timestamp(@Context HttpServletRequest request,
@Context HttpHeaders headers) throws IOException, TSPException {
TimeStampRequest tsq = new TimeStampRequest(request.getInputStream());
TimestampingAuthority tsa = tsaFactory.getTimestampingAuthority(request.getServerName());
TimeStampResponse tsr = tsa.timestamp(tsq);
return Response.ok(tsr.getEncoded()).build();
}
public static void setTimestampingAuthorityFactory(TimestampingAuthorityFactory factory) {
tsaFactory = factory;
}
}
|
Add content types to POST
|
Add content types to POST
|
Java
|
agpl-3.0
|
mbrossard/cryptonit-cloud,mbrossard/cryptonit-cloud,mbrossard/cryptonit-cloud
|
java
|
## Code Before:
package org.cryptonit.cloud.timestamping;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.bouncycastle.tsp.TSPException;
import org.bouncycastle.tsp.TimeStampRequest;
import org.bouncycastle.tsp.TimeStampResponse;
import org.cryptonit.cloud.interfaces.TimestampingAuthority;
import org.cryptonit.cloud.interfaces.TimestampingAuthorityFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Mathias Brossard
*/
@Path("/timestamp")
public class Application {
private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);
private static TimestampingAuthorityFactory tsaFactory = null;
@POST
public Response timestamp(@Context HttpServletRequest request,
@Context HttpHeaders headers) throws IOException, TSPException {
TimeStampRequest tsq = new TimeStampRequest(request.getInputStream());
TimestampingAuthority tsa = tsaFactory.getTimestampingAuthority(request.getServerName());
TimeStampResponse tsr = tsa.timestamp(tsq);
return Response.ok(tsr.getEncoded()).build();
}
public static void setTimestampingAuthorityFactory(TimestampingAuthorityFactory factory) {
tsaFactory = factory;
}
}
## Instruction:
Add content types to POST
## Code After:
package org.cryptonit.cloud.timestamping;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.bouncycastle.tsp.TSPException;
import org.bouncycastle.tsp.TimeStampRequest;
import org.bouncycastle.tsp.TimeStampResponse;
import org.cryptonit.cloud.interfaces.TimestampingAuthority;
import org.cryptonit.cloud.interfaces.TimestampingAuthorityFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Mathias Brossard
*/
@Path("/timestamp")
public class Application {
private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);
private static TimestampingAuthorityFactory tsaFactory = null;
@POST
@Consumes("application/timestamp-query")
@Produces("application/timestamp-reply")
public Response timestamp(@Context HttpServletRequest request,
@Context HttpHeaders headers) throws IOException, TSPException {
TimeStampRequest tsq = new TimeStampRequest(request.getInputStream());
TimestampingAuthority tsa = tsaFactory.getTimestampingAuthority(request.getServerName());
TimeStampResponse tsr = tsa.timestamp(tsq);
return Response.ok(tsr.getEncoded()).build();
}
public static void setTimestampingAuthorityFactory(TimestampingAuthorityFactory factory) {
tsaFactory = factory;
}
}
|
...
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
...
private static TimestampingAuthorityFactory tsaFactory = null;
@POST
@Consumes("application/timestamp-query")
@Produces("application/timestamp-reply")
public Response timestamp(@Context HttpServletRequest request,
@Context HttpHeaders headers) throws IOException, TSPException {
TimeStampRequest tsq = new TimeStampRequest(request.getInputStream());
...
|
d81a1ba12add244cb246efeae5c292a6d995c9b8
|
deadlinks.py
|
deadlinks.py
|
from operator import itemgetter
from itertools import chain
import os
import yaml
import requests
yaml.load_all
directory = "_companies"
flat = chain.from_iterable
def link_status_company(filename):
(name, _) = filename.rsplit(".", 1);
print("==== {name} ====".format(name=name))
docs = filter(None, yaml.load_all(open(os.path.join(directory, filename))))
positions = flat(map(itemgetter("positions"), filter(lambda doc: "positions" in doc, docs)))
def link_status_position(position):
title = position["title"]
url = position["url"]
print("{title} [ {url} ]".format(title=title, url=url))
response = requests.get(url)
status_code_description = requests.status_codes._codes.get(response.status_code, '-')
print("{} {} {}".format(response.status_code, status_code_description, response.history))
print()
list(map(link_status_position, positions))
list(map(link_status_company, sorted(os.listdir(directory))))
|
from operator import itemgetter
from itertools import chain
import os
import yaml
import requests
yaml.load_all
directory = "_companies"
flat = chain.from_iterable
def link_status_company(filename):
(name, _) = filename.rsplit(".", 1);
print("==== {name} ====".format(name=name))
docs = filter(None, yaml.load_all(open(os.path.join(directory, filename))))
positions = flat(map(itemgetter("positions"), filter(lambda doc: "positions" in doc, docs)))
def link_status_position(position):
title = position["title"]
url = position["url"]
print("{title} [ {url} ]".format(title=title, url=url))
try:
response = requests.get(url, timeout=10)
status_code_description = requests.status_codes._codes.get(response.status_code, '-')
print("{} {} {}".format(response.status_code, status_code_description, response.history))
except Exception as e:
print(e)
print()
list(map(link_status_position, positions))
list(map(link_status_company, sorted(os.listdir(directory))))
|
Add timeout to dead links script
|
Add timeout to dead links script
|
Python
|
apache-2.0
|
Stockholm-AI/stockholm-ai,Stockholm-AI/stockholm-ai,Stockholm-AI/stockholm-ai,Stockholm-AI/stockholm-ai,Stockholm-AI/stockholm-ai
|
python
|
## Code Before:
from operator import itemgetter
from itertools import chain
import os
import yaml
import requests
yaml.load_all
directory = "_companies"
flat = chain.from_iterable
def link_status_company(filename):
(name, _) = filename.rsplit(".", 1);
print("==== {name} ====".format(name=name))
docs = filter(None, yaml.load_all(open(os.path.join(directory, filename))))
positions = flat(map(itemgetter("positions"), filter(lambda doc: "positions" in doc, docs)))
def link_status_position(position):
title = position["title"]
url = position["url"]
print("{title} [ {url} ]".format(title=title, url=url))
response = requests.get(url)
status_code_description = requests.status_codes._codes.get(response.status_code, '-')
print("{} {} {}".format(response.status_code, status_code_description, response.history))
print()
list(map(link_status_position, positions))
list(map(link_status_company, sorted(os.listdir(directory))))
## Instruction:
Add timeout to dead links script
## Code After:
from operator import itemgetter
from itertools import chain
import os
import yaml
import requests
yaml.load_all
directory = "_companies"
flat = chain.from_iterable
def link_status_company(filename):
(name, _) = filename.rsplit(".", 1);
print("==== {name} ====".format(name=name))
docs = filter(None, yaml.load_all(open(os.path.join(directory, filename))))
positions = flat(map(itemgetter("positions"), filter(lambda doc: "positions" in doc, docs)))
def link_status_position(position):
title = position["title"]
url = position["url"]
print("{title} [ {url} ]".format(title=title, url=url))
try:
response = requests.get(url, timeout=10)
status_code_description = requests.status_codes._codes.get(response.status_code, '-')
print("{} {} {}".format(response.status_code, status_code_description, response.history))
except Exception as e:
print(e)
print()
list(map(link_status_position, positions))
list(map(link_status_company, sorted(os.listdir(directory))))
|
// ... existing code ...
url = position["url"]
print("{title} [ {url} ]".format(title=title, url=url))
try:
response = requests.get(url, timeout=10)
status_code_description = requests.status_codes._codes.get(response.status_code, '-')
print("{} {} {}".format(response.status_code, status_code_description, response.history))
except Exception as e:
print(e)
print()
// ... rest of the code ...
|
268c4458161ce754a82e3986787f6703f9122e3e
|
trackmybmi/users/factories.py
|
trackmybmi/users/factories.py
|
import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
Replace User import with call to get_user_model()
|
Replace User import with call to get_user_model()
|
Python
|
mit
|
ojh/trackmybmi
|
python
|
## Code Before:
import factory
from django.contrib.auth.hashers import make_password
from .models import Friendship, User
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
## Instruction:
Replace User import with call to get_user_model()
## Code After:
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
"""Create users with default attributes."""
class Meta:
model = User
email = factory.Sequence(lambda n: 'user.{}@test.test'.format(n))
password = make_password('password')
class FriendshipFactory(factory.django.DjangoModelFactory):
class Meta:
model = Friendship
initiator = factory.SubFactory(UserFactory)
recipient = factory.SubFactory(UserFactory)
|
...
import factory
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from .models import Friendship
User = get_user_model()
class UserFactory(factory.django.DjangoModelFactory):
...
|
891ca8ee117f462a1648e954b756f1d29a5f527c
|
tests/test_errors.py
|
tests/test_errors.py
|
"""Tests for errors.py"""
import aiohttp
def test_bad_status_line1():
err = aiohttp.BadStatusLine(b'')
assert str(err) == "b''"
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
|
"""Tests for errors.py"""
import aiohttp
def test_bad_status_line1():
err = aiohttp.BadStatusLine(b'')
assert str(err) == "b''"
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
def test_fingerprint_mismatch():
err = aiohttp.FingerprintMismatch('exp', 'got', 'host', 8888)
expected = '<FingerprintMismatch expected=exp got=got host=host port=8888>'
assert expected == repr(err)
|
Add a test for FingerprintMismatch repr
|
Add a test for FingerprintMismatch repr
|
Python
|
apache-2.0
|
jettify/aiohttp,esaezgil/aiohttp,z2v/aiohttp,arthurdarcet/aiohttp,pfreixes/aiohttp,z2v/aiohttp,mind1master/aiohttp,KeepSafe/aiohttp,mind1master/aiohttp,juliatem/aiohttp,hellysmile/aiohttp,esaezgil/aiohttp,esaezgil/aiohttp,arthurdarcet/aiohttp,panda73111/aiohttp,pfreixes/aiohttp,z2v/aiohttp,alex-eri/aiohttp-1,singulared/aiohttp,moden-py/aiohttp,singulared/aiohttp,AraHaanOrg/aiohttp,KeepSafe/aiohttp,arthurdarcet/aiohttp,hellysmile/aiohttp,alex-eri/aiohttp-1,singulared/aiohttp,jettify/aiohttp,panda73111/aiohttp,alex-eri/aiohttp-1,moden-py/aiohttp,playpauseandstop/aiohttp,jettify/aiohttp,KeepSafe/aiohttp,rutsky/aiohttp,juliatem/aiohttp,AraHaanOrg/aiohttp,mind1master/aiohttp,rutsky/aiohttp,panda73111/aiohttp,Eyepea/aiohttp,moden-py/aiohttp,rutsky/aiohttp
|
python
|
## Code Before:
"""Tests for errors.py"""
import aiohttp
def test_bad_status_line1():
err = aiohttp.BadStatusLine(b'')
assert str(err) == "b''"
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
## Instruction:
Add a test for FingerprintMismatch repr
## Code After:
"""Tests for errors.py"""
import aiohttp
def test_bad_status_line1():
err = aiohttp.BadStatusLine(b'')
assert str(err) == "b''"
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
def test_fingerprint_mismatch():
err = aiohttp.FingerprintMismatch('exp', 'got', 'host', 8888)
expected = '<FingerprintMismatch expected=exp got=got host=host port=8888>'
assert expected == repr(err)
|
# ... existing code ...
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
def test_fingerprint_mismatch():
err = aiohttp.FingerprintMismatch('exp', 'got', 'host', 8888)
expected = '<FingerprintMismatch expected=exp got=got host=host port=8888>'
assert expected == repr(err)
# ... rest of the code ...
|
c5f9b9bc76f797156b73a2bb26b80ebf23d62fe4
|
polyaxon/pipelines/celery_task.py
|
polyaxon/pipelines/celery_task.py
|
from pipelines.models import Operation
from polyaxon.celery_api import CeleryTask
class OperationTask(CeleryTask):
"""Base operation celery task with basic logging."""
_operation = None
def run(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
super(OperationTask, self).run(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
super(OperationTask, self).on_failure(exc, task_id, args, kwargs, einfo)
self._operation.on_failure()
def on_retry(self, exc, task_id, args, kwargs, einfo):
super(OperationTask, self).on_retry(exc, task_id, args, kwargs, einfo)
self._operation.on_retry()
def on_success(self, retval, task_id, args, kwargs):
"""Send email notification and a file, if requested to do so by a user"""
super(OperationTask, self).on_success(retval, task_id, args, kwargs)
self._operation.on_success()
|
from pipelines.models import Operation
from polyaxon.celery_api import CeleryTask
class OperationTask(CeleryTask):
"""Base operation celery task with basic logging."""
_operation = None
def __call__(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
self._operation.on_run()
self.max_retries = self._operation.max_retries
self.countdown = self._operation.get_countdown(self.request.retries)
super(OperationTask, self).__call__(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
super(OperationTask, self).on_failure(exc, task_id, args, kwargs, einfo)
self._operation.on_failure()
def on_retry(self, exc, task_id, args, kwargs, einfo):
super(OperationTask, self).on_retry(exc, task_id, args, kwargs, einfo)
self._operation.on_retry()
def on_success(self, retval, task_id, args, kwargs):
"""Send email notification and a file, if requested to do so by a user"""
super(OperationTask, self).on_success(retval, task_id, args, kwargs)
self._operation.on_success()
|
Update OperationCelery with max_retries and countdown logic
|
Update OperationCelery with max_retries and countdown logic
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
python
|
## Code Before:
from pipelines.models import Operation
from polyaxon.celery_api import CeleryTask
class OperationTask(CeleryTask):
"""Base operation celery task with basic logging."""
_operation = None
def run(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
super(OperationTask, self).run(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
super(OperationTask, self).on_failure(exc, task_id, args, kwargs, einfo)
self._operation.on_failure()
def on_retry(self, exc, task_id, args, kwargs, einfo):
super(OperationTask, self).on_retry(exc, task_id, args, kwargs, einfo)
self._operation.on_retry()
def on_success(self, retval, task_id, args, kwargs):
"""Send email notification and a file, if requested to do so by a user"""
super(OperationTask, self).on_success(retval, task_id, args, kwargs)
self._operation.on_success()
## Instruction:
Update OperationCelery with max_retries and countdown logic
## Code After:
from pipelines.models import Operation
from polyaxon.celery_api import CeleryTask
class OperationTask(CeleryTask):
"""Base operation celery task with basic logging."""
_operation = None
def __call__(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
self._operation.on_run()
self.max_retries = self._operation.max_retries
self.countdown = self._operation.get_countdown(self.request.retries)
super(OperationTask, self).__call__(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
super(OperationTask, self).on_failure(exc, task_id, args, kwargs, einfo)
self._operation.on_failure()
def on_retry(self, exc, task_id, args, kwargs, einfo):
super(OperationTask, self).on_retry(exc, task_id, args, kwargs, einfo)
self._operation.on_retry()
def on_success(self, retval, task_id, args, kwargs):
"""Send email notification and a file, if requested to do so by a user"""
super(OperationTask, self).on_success(retval, task_id, args, kwargs)
self._operation.on_success()
|
...
"""Base operation celery task with basic logging."""
_operation = None
def __call__(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
self._operation.on_run()
self.max_retries = self._operation.max_retries
self.countdown = self._operation.get_countdown(self.request.retries)
super(OperationTask, self).__call__(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
...
|
3ef1531f6934055a416cdddc694f6ca75694d649
|
voltron/common.py
|
voltron/common.py
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
Make use of expanduser() more sane
|
Make use of expanduser() more sane
|
Python
|
mit
|
snare/voltron,snare/voltron,snare/voltron,snare/voltron
|
python
|
## Code Before:
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
## Instruction:
Make use of expanduser() more sane
## Code After:
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
...
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
...
|
902e4ce0848cc2c3afa7192a85d413ed2919c798
|
csunplugged/tests/plugging_it_in/models/test_testcase.py
|
csunplugged/tests/plugging_it_in/models/test_testcase.py
|
from plugging_it_in.models import TestCase
from tests.BaseTestWithDB import BaseTestWithDB
from tests.topics.TopicsTestDataGenerator import TopicsTestDataGenerator
class TestCaseModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.test_data = TopicsTestDataGenerator()
def create_testcase(self):
topic = self.test_data.create_topic(1)
difficulty = self.test_data.create_difficulty_level(1)
challenge = self.test_data.create_programming_challenge(topic, 1, difficulty)
self.test_data.create_programming_challenge_test_case(1, challenge)
self.test_case = TestCase.objects.get(id=1)
def test_testcase_verbose_model_name(self):
self.create_testcase()
verbose_name = self.test_case._meta.verbose_name
self.assertEquals(verbose_name, "Test Case")
|
from tests.BaseTestWithDB import BaseTestWithDB
from tests.topics.TopicsTestDataGenerator import TopicsTestDataGenerator
class TestCaseModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.test_data = TopicsTestDataGenerator()
def create_testcase(self):
topic = self.test_data.create_topic(1)
difficulty = self.test_data.create_difficulty_level(1)
challenge = self.test_data.create_programming_challenge(topic, 1, difficulty)
self.test_case = self.test_data.create_programming_challenge_test_case(1, challenge)
def test_testcase_verbose_model_name(self):
self.create_testcase()
verbose_name = self.test_case._meta.verbose_name
self.assertEquals(verbose_name, "Test Case")
|
Fix models unit test for plugging it in
|
Fix models unit test for plugging it in
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
python
|
## Code Before:
from plugging_it_in.models import TestCase
from tests.BaseTestWithDB import BaseTestWithDB
from tests.topics.TopicsTestDataGenerator import TopicsTestDataGenerator
class TestCaseModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.test_data = TopicsTestDataGenerator()
def create_testcase(self):
topic = self.test_data.create_topic(1)
difficulty = self.test_data.create_difficulty_level(1)
challenge = self.test_data.create_programming_challenge(topic, 1, difficulty)
self.test_data.create_programming_challenge_test_case(1, challenge)
self.test_case = TestCase.objects.get(id=1)
def test_testcase_verbose_model_name(self):
self.create_testcase()
verbose_name = self.test_case._meta.verbose_name
self.assertEquals(verbose_name, "Test Case")
## Instruction:
Fix models unit test for plugging it in
## Code After:
from tests.BaseTestWithDB import BaseTestWithDB
from tests.topics.TopicsTestDataGenerator import TopicsTestDataGenerator
class TestCaseModelTest(BaseTestWithDB):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.test_data = TopicsTestDataGenerator()
def create_testcase(self):
topic = self.test_data.create_topic(1)
difficulty = self.test_data.create_difficulty_level(1)
challenge = self.test_data.create_programming_challenge(topic, 1, difficulty)
self.test_case = self.test_data.create_programming_challenge_test_case(1, challenge)
def test_testcase_verbose_model_name(self):
self.create_testcase()
verbose_name = self.test_case._meta.verbose_name
self.assertEquals(verbose_name, "Test Case")
|
...
from tests.BaseTestWithDB import BaseTestWithDB
from tests.topics.TopicsTestDataGenerator import TopicsTestDataGenerator
...
difficulty = self.test_data.create_difficulty_level(1)
challenge = self.test_data.create_programming_challenge(topic, 1, difficulty)
self.test_case = self.test_data.create_programming_challenge_test_case(1, challenge)
def test_testcase_verbose_model_name(self):
self.create_testcase()
...
|
32994f27d1644415e8cd4a22f1b47d4938d3620c
|
fulfil_client/oauth.py
|
fulfil_client/oauth.py
|
from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
|
from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
|
Add provision to pass extra args in auth url
|
Add provision to pass extra args in auth url
|
Python
|
isc
|
fulfilio/fulfil-python-api,sharoonthomas/fulfil-python-api
|
python
|
## Code Before:
from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(self.base_url + 'oauth/authorize')
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
## Instruction:
Add provision to pass extra args in auth url
## Code After:
from requests_oauthlib import OAuth2Session
class Session(OAuth2Session):
client_id = None
client_secret = None
def __init__(self, subdomain, **kwargs):
client_id = self.client_id
client_secret = self.client_secret
self.fulfil_subdomain = subdomain
if not (client_id and client_secret):
raise Exception('Missing client_id or client_secret.')
super(Session, self).__init__(client_id=client_id, **kwargs)
@classmethod
def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret
@property
def base_url(self):
if self.fulfil_subdomain == 'localhost':
return 'http://localhost:8000/'
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
return self.fetch_token(
token_url, client_secret=self.client_secret, code=code
)
|
# ... existing code ...
else:
return 'https://%s.fulfil.io/' % self.fulfil_subdomain
def create_authorization_url(self, redirect_uri, scope, **kwargs):
self.redirect_uri = redirect_uri
self.scope = scope
return self.authorization_url(
self.base_url + 'oauth/authorize', **kwargs)
def get_token(self, code):
token_url = self.base_url + 'oauth/token'
# ... rest of the code ...
|
498a068576cbbe2935e2db32076184cdb1b3d8fc
|
dspace/modules/api-stats/src/main/java/org/datadryad/journalstatistics/statistics/DefaultStatisticsPackage.java
|
dspace/modules/api-stats/src/main/java/org/datadryad/journalstatistics/statistics/DefaultStatisticsPackage.java
|
/*
*/
package org.datadryad.journalstatistics.statistics;
import java.util.ArrayList;
import java.util.List;
import org.datadryad.journalstatistics.extractor.DataPackageCount;
import org.dspace.core.Context;
/**
*
* @author Dan Leehr <[email protected]>
*/
public class DefaultStatisticsPackage implements StatisticsPackage {
private List<Statistic> statistics = new ArrayList<Statistic>();
public DefaultStatisticsPackage(Context context) {
statistics.add(new Statistic<Integer>("Data packages by journal", new DataPackageCount(context)));
}
@Override
public void run(String journalName) {
for(Statistic s : statistics) {
s.extractAndStore(journalName);
System.out.println(s);
}
}
}
|
/*
*/
package org.datadryad.journalstatistics.statistics;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.datadryad.journalstatistics.extractor.DataFileCount;
import org.datadryad.journalstatistics.extractor.DataFileTotalSize;
import org.datadryad.journalstatistics.extractor.DataPackageCount;
import org.datadryad.journalstatistics.extractor.DataPackageUnpublishedCount;
import org.datadryad.journalstatistics.extractor.EmbargoedDataFileCount;
import org.dspace.core.Context;
/**
*
* @author Dan Leehr <[email protected]>
*/
public class DefaultStatisticsPackage implements StatisticsPackage {
private List<Statistic> statistics = new ArrayList<Statistic>();
public DefaultStatisticsPackage(Context context) {
statistics.add(new Statistic<Integer>("Data packages count", new DataPackageCount(context)));
statistics.add(new Statistic<Integer>("Data files count", new DataFileCount(context)));
statistics.add(new Statistic<Long>("Data files total size", new DataFileTotalSize(context)));
statistics.add(new Statistic<Map<String, Integer>>("Unpublished data packages", new DataPackageUnpublishedCount(context)));
statistics.add(new Statistic<Integer>("Embargoed data files", new EmbargoedDataFileCount(context)));
}
@Override
public void run(String journalName) {
for(Statistic s : statistics) {
s.extractAndStore(journalName);
System.out.println(s);
}
}
}
|
Add extractors to default statistics package
|
Add extractors to default statistics package
|
Java
|
bsd-3-clause
|
jimallman/dryad-repo,rnathanday/dryad-repo,jimallman/dryad-repo,jamie-dryad/dryad-repo,jimallman/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,jamie-dryad/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,rnathanday/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,jamie-dryad/dryad-repo,jamie-dryad/dryad-repo,ojacobson/dryad-repo,jamie-dryad/dryad-repo,rnathanday/dryad-repo
|
java
|
## Code Before:
/*
*/
package org.datadryad.journalstatistics.statistics;
import java.util.ArrayList;
import java.util.List;
import org.datadryad.journalstatistics.extractor.DataPackageCount;
import org.dspace.core.Context;
/**
*
* @author Dan Leehr <[email protected]>
*/
public class DefaultStatisticsPackage implements StatisticsPackage {
private List<Statistic> statistics = new ArrayList<Statistic>();
public DefaultStatisticsPackage(Context context) {
statistics.add(new Statistic<Integer>("Data packages by journal", new DataPackageCount(context)));
}
@Override
public void run(String journalName) {
for(Statistic s : statistics) {
s.extractAndStore(journalName);
System.out.println(s);
}
}
}
## Instruction:
Add extractors to default statistics package
## Code After:
/*
*/
package org.datadryad.journalstatistics.statistics;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.datadryad.journalstatistics.extractor.DataFileCount;
import org.datadryad.journalstatistics.extractor.DataFileTotalSize;
import org.datadryad.journalstatistics.extractor.DataPackageCount;
import org.datadryad.journalstatistics.extractor.DataPackageUnpublishedCount;
import org.datadryad.journalstatistics.extractor.EmbargoedDataFileCount;
import org.dspace.core.Context;
/**
*
* @author Dan Leehr <[email protected]>
*/
public class DefaultStatisticsPackage implements StatisticsPackage {
private List<Statistic> statistics = new ArrayList<Statistic>();
public DefaultStatisticsPackage(Context context) {
statistics.add(new Statistic<Integer>("Data packages count", new DataPackageCount(context)));
statistics.add(new Statistic<Integer>("Data files count", new DataFileCount(context)));
statistics.add(new Statistic<Long>("Data files total size", new DataFileTotalSize(context)));
statistics.add(new Statistic<Map<String, Integer>>("Unpublished data packages", new DataPackageUnpublishedCount(context)));
statistics.add(new Statistic<Integer>("Embargoed data files", new EmbargoedDataFileCount(context)));
}
@Override
public void run(String journalName) {
for(Statistic s : statistics) {
s.extractAndStore(journalName);
System.out.println(s);
}
}
}
|
# ... existing code ...
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.datadryad.journalstatistics.extractor.DataFileCount;
import org.datadryad.journalstatistics.extractor.DataFileTotalSize;
import org.datadryad.journalstatistics.extractor.DataPackageCount;
import org.datadryad.journalstatistics.extractor.DataPackageUnpublishedCount;
import org.datadryad.journalstatistics.extractor.EmbargoedDataFileCount;
import org.dspace.core.Context;
/**
# ... modified code ...
public class DefaultStatisticsPackage implements StatisticsPackage {
private List<Statistic> statistics = new ArrayList<Statistic>();
public DefaultStatisticsPackage(Context context) {
statistics.add(new Statistic<Integer>("Data packages count", new DataPackageCount(context)));
statistics.add(new Statistic<Integer>("Data files count", new DataFileCount(context)));
statistics.add(new Statistic<Long>("Data files total size", new DataFileTotalSize(context)));
statistics.add(new Statistic<Map<String, Integer>>("Unpublished data packages", new DataPackageUnpublishedCount(context)));
statistics.add(new Statistic<Integer>("Embargoed data files", new EmbargoedDataFileCount(context)));
}
@Override
public void run(String journalName) {
# ... rest of the code ...
|
d3e2a11f72f6de811f39ac10aa0abde74b99d269
|
hcibench/pipeline/__init__.py
|
hcibench/pipeline/__init__.py
|
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
Make Estimator importable from pipeline.
|
Make Estimator importable from pipeline.
|
Python
|
mit
|
ucdrascal/axopy,ucdrascal/hcibench
|
python
|
## Code Before:
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
## Instruction:
Make Estimator importable from pipeline.
## Code After:
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
'PassthroughPipeline',
'Windower',
'Filter',
'FeatureExtractor',
'Estimator']
|
# ... existing code ...
from .base import PipelineBlock, Pipeline, PassthroughPipeline
from .common import Windower, Filter, FeatureExtractor, Estimator
__all__ = ['PipelineBlock',
'Pipeline',
# ... rest of the code ...
|
10be723bf9396c3e513d09ce2a16a3aee0eebe36
|
setup.py
|
setup.py
|
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='[email protected]',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='[email protected]',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
Make sure the package is built before it is tested
|
Make sure the package is built before it is tested
|
Python
|
bsd-3-clause
|
barentsen/reproject,mwcraig/reproject,astrofrog/reproject,astrofrog/reproject,bsipocz/reproject,barentsen/reproject,barentsen/reproject,astrofrog/reproject,bsipocz/reproject,mwcraig/reproject
|
python
|
## Code Before:
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='[email protected]',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
## Instruction:
Make sure the package is built before it is tested
## Code After:
import os
from distutils.core import setup, Extension, Command
from distutils.command.sdist import sdist
from distutils.command.build_py import build_py
from numpy import get_include as get_numpy_include
numpy_includes = get_numpy_include()
ext_modules = [Extension("reproject._overlap_wrapper",
['reproject/_overlap_wrapper.c', 'reproject/overlapArea.c'],
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
setup(name='reproject',
version="0.1.0",
author='Thomas Robitaille',
author_email='[email protected]',
packages=['reproject', 'reproject.tests'],
cmdclass = {'test': PyTest},
ext_modules = ext_modules
)
|
# ... existing code ...
include_dirs=[numpy_includes])]
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import os
import shutil
import tempfile
# First ensure that we build the package so that 2to3 gets executed
self.reinitialize_command('build')
self.run_command('build')
build_cmd = self.get_finalized_command('build')
new_path = os.path.abspath(build_cmd.build_lib)
# Copy the build to a temporary directory for the purposes of testing
# - this avoids creating pyc and __pycache__ directories inside the
# build directory
tmp_dir = tempfile.mkdtemp(prefix='reprojection-test-')
testing_path = os.path.join(tmp_dir, os.path.basename(new_path))
shutil.copytree(new_path, testing_path)
import sys
import subprocess
errno = subprocess.call([sys.executable, os.path.abspath('runtests.py')], cwd=testing_path)
raise SystemExit(errno)
# ... rest of the code ...
|
fa0eb2e8957bd6930be3bdec2bb61411b57279b6
|
app/src/main/java/com/jonasgerdes/schauburgr/network/image/SchauburgImageUrlCreator.java
|
app/src/main/java/com/jonasgerdes/schauburgr/network/image/SchauburgImageUrlCreator.java
|
package com.jonasgerdes.schauburgr.network.image;
import com.jonasgerdes.schauburgr.model.Movie;
/**
* Created by jonas on 07.03.2017.
*/
public class SchauburgImageUrlCreator implements ImageUrlCreator {
private String mBaseUrl;
public SchauburgImageUrlCreator(String baseUrl) {
mBaseUrl = baseUrl;
}
@Override
public String getPosterImageUrl(Movie movie) {
return mBaseUrl + "generated/" + movie.getResourceId() + ".jpg";
}
}
|
package com.jonasgerdes.schauburgr.network.image;
import android.util.Base64;
import com.jonasgerdes.schauburgr.model.Movie;
/**
* Created by jonas on 07.03.2017.
*/
public class SchauburgImageUrlCreator implements ImageUrlCreator {
private String mBaseUrl;
public SchauburgImageUrlCreator(String baseUrl) {
mBaseUrl = baseUrl;
}
@Override
public String getPosterImageUrl(Movie movie) {
//Encode title as Base64 and append it to url to prevent issue when same image is used for
//different movie while still allowing image to be cached as long as used for same movie
byte[] titleBytes = movie.getTitle().getBytes();
String fingerprint = Base64.encodeToString(titleBytes, Base64.URL_SAFE | Base64.NO_PADDING);
return mBaseUrl + "generated/" + movie.getResourceId() + ".jpg?f=" + fingerprint;
}
}
|
Fix wrong posters beeing displayed
|
Fix wrong posters beeing displayed
|
Java
|
mit
|
JGerdes/Schauburgr
|
java
|
## Code Before:
package com.jonasgerdes.schauburgr.network.image;
import com.jonasgerdes.schauburgr.model.Movie;
/**
* Created by jonas on 07.03.2017.
*/
public class SchauburgImageUrlCreator implements ImageUrlCreator {
private String mBaseUrl;
public SchauburgImageUrlCreator(String baseUrl) {
mBaseUrl = baseUrl;
}
@Override
public String getPosterImageUrl(Movie movie) {
return mBaseUrl + "generated/" + movie.getResourceId() + ".jpg";
}
}
## Instruction:
Fix wrong posters beeing displayed
## Code After:
package com.jonasgerdes.schauburgr.network.image;
import android.util.Base64;
import com.jonasgerdes.schauburgr.model.Movie;
/**
* Created by jonas on 07.03.2017.
*/
public class SchauburgImageUrlCreator implements ImageUrlCreator {
private String mBaseUrl;
public SchauburgImageUrlCreator(String baseUrl) {
mBaseUrl = baseUrl;
}
@Override
public String getPosterImageUrl(Movie movie) {
//Encode title as Base64 and append it to url to prevent issue when same image is used for
//different movie while still allowing image to be cached as long as used for same movie
byte[] titleBytes = movie.getTitle().getBytes();
String fingerprint = Base64.encodeToString(titleBytes, Base64.URL_SAFE | Base64.NO_PADDING);
return mBaseUrl + "generated/" + movie.getResourceId() + ".jpg?f=" + fingerprint;
}
}
|
# ... existing code ...
package com.jonasgerdes.schauburgr.network.image;
import android.util.Base64;
import com.jonasgerdes.schauburgr.model.Movie;
# ... modified code ...
@Override
public String getPosterImageUrl(Movie movie) {
//Encode title as Base64 and append it to url to prevent issue when same image is used for
//different movie while still allowing image to be cached as long as used for same movie
byte[] titleBytes = movie.getTitle().getBytes();
String fingerprint = Base64.encodeToString(titleBytes, Base64.URL_SAFE | Base64.NO_PADDING);
return mBaseUrl + "generated/" + movie.getResourceId() + ".jpg?f=" + fingerprint;
}
}
# ... rest of the code ...
|
5651445944bce163a2c3f746d6ac1acd9ae76032
|
numpy/array_api/tests/test_asarray.py
|
numpy/array_api/tests/test_asarray.py
|
import numpy as np
def test_fast_return():
""""""
a = np.array([1, 2, 3], dtype='i')
assert np.asarray(a) is a
assert np.asarray(a, dtype='i') is a
# This may produce a new view or a copy, but is never the same object.
assert np.asarray(a, dtype='l') is not a
unequal_type = np.dtype('i', metadata={'spam': True})
b = np.asarray(a, dtype=unequal_type)
assert b is not a
assert b.base is a
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# A quirk of the metadata test is that equivalent metadata dicts are still
# separate objects and so don't evaluate as the same array type description.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
assert c.dtype is equivalent_requirement
|
import numpy as np
def test_fast_return():
""""""
a = np.array([1, 2, 3], dtype='i')
assert np.asarray(a) is a
assert np.asarray(a, dtype='i') is a
# This may produce a new view or a copy, but is never the same object.
assert np.asarray(a, dtype='l') is not a
unequal_type = np.dtype('i', metadata={'spam': True})
b = np.asarray(a, dtype=unequal_type)
assert b is not a
assert b.base is a
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# The descriptors are equivalent, but we have created
# distinct dtype instances.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
assert c.dtype is equivalent_requirement
|
Update comment and obey formatting requirements.
|
Update comment and obey formatting requirements.
|
Python
|
bsd-3-clause
|
charris/numpy,mhvk/numpy,mattip/numpy,mattip/numpy,mattip/numpy,numpy/numpy,mhvk/numpy,endolith/numpy,charris/numpy,numpy/numpy,endolith/numpy,charris/numpy,numpy/numpy,endolith/numpy,endolith/numpy,charris/numpy,mattip/numpy,numpy/numpy,mhvk/numpy,mhvk/numpy,mhvk/numpy
|
python
|
## Code Before:
import numpy as np
def test_fast_return():
""""""
a = np.array([1, 2, 3], dtype='i')
assert np.asarray(a) is a
assert np.asarray(a, dtype='i') is a
# This may produce a new view or a copy, but is never the same object.
assert np.asarray(a, dtype='l') is not a
unequal_type = np.dtype('i', metadata={'spam': True})
b = np.asarray(a, dtype=unequal_type)
assert b is not a
assert b.base is a
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# A quirk of the metadata test is that equivalent metadata dicts are still
# separate objects and so don't evaluate as the same array type description.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
assert c.dtype is equivalent_requirement
## Instruction:
Update comment and obey formatting requirements.
## Code After:
import numpy as np
def test_fast_return():
""""""
a = np.array([1, 2, 3], dtype='i')
assert np.asarray(a) is a
assert np.asarray(a, dtype='i') is a
# This may produce a new view or a copy, but is never the same object.
assert np.asarray(a, dtype='l') is not a
unequal_type = np.dtype('i', metadata={'spam': True})
b = np.asarray(a, dtype=unequal_type)
assert b is not a
assert b.base is a
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# The descriptors are equivalent, but we have created
# distinct dtype instances.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
assert c.dtype is equivalent_requirement
|
# ... existing code ...
equivalent_requirement = np.dtype('i', metadata={'spam': True})
c = np.asarray(b, dtype=equivalent_requirement)
# The descriptors are equivalent, but we have created
# distinct dtype instances.
assert unequal_type == equivalent_requirement
assert unequal_type is not equivalent_requirement
assert c is not b
# ... rest of the code ...
|
c55dbf067d85c3a060a6ffeff2aad24991e95eae
|
pandas/tests/series/test_validate.py
|
pandas/tests/series/test_validate.py
|
import pytest
from pandas.core.series import Series
class TestSeriesValidate(object):
"""Tests for error handling related to data types of method arguments."""
s = Series([1, 2, 3, 4, 5])
def test_validate_bool_args(self):
# Tests for error handling related to boolean arguments.
invalid_values = [1, "True", [1, 2, 3], 5.0]
for value in invalid_values:
with pytest.raises(ValueError):
self.s.reset_index(inplace=value)
with pytest.raises(ValueError):
self.s._set_name(name='hello', inplace=value)
with pytest.raises(ValueError):
self.s.sort_values(inplace=value)
with pytest.raises(ValueError):
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.rename(inplace=value)
with pytest.raises(ValueError):
self.s.dropna(inplace=value)
|
import pytest
from pandas.core.series import Series
class TestSeriesValidate(object):
"""Tests for error handling related to data types of method arguments."""
s = Series([1, 2, 3, 4, 5])
def test_validate_bool_args(self):
# Tests for error handling related to boolean arguments.
invalid_values = [1, "True", [1, 2, 3], 5.0]
for value in invalid_values:
with pytest.raises(ValueError):
self.s.reset_index(inplace=value)
with pytest.raises(ValueError):
self.s._set_name(name='hello', inplace=value)
with pytest.raises(ValueError):
self.s.sort_values(inplace=value)
with pytest.raises(ValueError):
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.rename(inplace=value)
with pytest.raises(ValueError):
self.s.dropna(inplace=value)
|
Remove duplicate Series sort_index check
|
MAINT: Remove duplicate Series sort_index check
Duplicate boolean validation check for sort_index in series/test_validate.py
|
Python
|
bsd-3-clause
|
pratapvardhan/pandas,rs2/pandas,winklerand/pandas,jorisvandenbossche/pandas,zfrenchee/pandas,winklerand/pandas,TomAugspurger/pandas,pandas-dev/pandas,harisbal/pandas,jmmease/pandas,pratapvardhan/pandas,TomAugspurger/pandas,DGrady/pandas,jorisvandenbossche/pandas,cython-testbed/pandas,zfrenchee/pandas,nmartensen/pandas,louispotok/pandas,dsm054/pandas,Winand/pandas,gfyoung/pandas,MJuddBooth/pandas,gfyoung/pandas,jorisvandenbossche/pandas,rs2/pandas,cbertinato/pandas,zfrenchee/pandas,DGrady/pandas,DGrady/pandas,louispotok/pandas,harisbal/pandas,pratapvardhan/pandas,jmmease/pandas,harisbal/pandas,cython-testbed/pandas,cython-testbed/pandas,Winand/pandas,dsm054/pandas,MJuddBooth/pandas,jreback/pandas,pandas-dev/pandas,toobaz/pandas,winklerand/pandas,jmmease/pandas,nmartensen/pandas,Winand/pandas,winklerand/pandas,datapythonista/pandas,amolkahat/pandas,gfyoung/pandas,MJuddBooth/pandas,dsm054/pandas,louispotok/pandas,jmmease/pandas,toobaz/pandas,dsm054/pandas,MJuddBooth/pandas,winklerand/pandas,pratapvardhan/pandas,jreback/pandas,cbertinato/pandas,pandas-dev/pandas,kdebrab/pandas,cbertinato/pandas,GuessWhoSamFoo/pandas,amolkahat/pandas,toobaz/pandas,harisbal/pandas,DGrady/pandas,kdebrab/pandas,pratapvardhan/pandas,kdebrab/pandas,jmmease/pandas,jmmease/pandas,DGrady/pandas,amolkahat/pandas,GuessWhoSamFoo/pandas,toobaz/pandas,DGrady/pandas,zfrenchee/pandas,gfyoung/pandas,GuessWhoSamFoo/pandas,cython-testbed/pandas,Winand/pandas,GuessWhoSamFoo/pandas,rs2/pandas,Winand/pandas,Winand/pandas,nmartensen/pandas,datapythonista/pandas,louispotok/pandas,amolkahat/pandas,harisbal/pandas,jreback/pandas,cbertinato/pandas,kdebrab/pandas,jorisvandenbossche/pandas,nmartensen/pandas,datapythonista/pandas,amolkahat/pandas,winklerand/pandas,jreback/pandas,jreback/pandas,TomAugspurger/pandas,cython-testbed/pandas,MJuddBooth/pandas,GuessWhoSamFoo/pandas,datapythonista/pandas,pandas-dev/pandas,rs2/pandas,kdebrab/pandas,dsm054/pandas,louispotok/pandas,TomAugspurger/pandas,nmartensen/pandas,nmarte
nsen/pandas,zfrenchee/pandas,cbertinato/pandas,gfyoung/pandas,toobaz/pandas
|
python
|
## Code Before:
import pytest
from pandas.core.series import Series
class TestSeriesValidate(object):
"""Tests for error handling related to data types of method arguments."""
s = Series([1, 2, 3, 4, 5])
def test_validate_bool_args(self):
# Tests for error handling related to boolean arguments.
invalid_values = [1, "True", [1, 2, 3], 5.0]
for value in invalid_values:
with pytest.raises(ValueError):
self.s.reset_index(inplace=value)
with pytest.raises(ValueError):
self.s._set_name(name='hello', inplace=value)
with pytest.raises(ValueError):
self.s.sort_values(inplace=value)
with pytest.raises(ValueError):
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.rename(inplace=value)
with pytest.raises(ValueError):
self.s.dropna(inplace=value)
## Instruction:
MAINT: Remove duplicate Series sort_index check
Duplicate boolean validation check for sort_index in series/test_validate.py
## Code After:
import pytest
from pandas.core.series import Series
class TestSeriesValidate(object):
"""Tests for error handling related to data types of method arguments."""
s = Series([1, 2, 3, 4, 5])
def test_validate_bool_args(self):
# Tests for error handling related to boolean arguments.
invalid_values = [1, "True", [1, 2, 3], 5.0]
for value in invalid_values:
with pytest.raises(ValueError):
self.s.reset_index(inplace=value)
with pytest.raises(ValueError):
self.s._set_name(name='hello', inplace=value)
with pytest.raises(ValueError):
self.s.sort_values(inplace=value)
with pytest.raises(ValueError):
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.rename(inplace=value)
with pytest.raises(ValueError):
self.s.dropna(inplace=value)
|
...
self.s.sort_index(inplace=value)
with pytest.raises(ValueError):
self.s.rename(inplace=value)
with pytest.raises(ValueError):
...
|
a7c210a68a8671137681c55324341c60b256a92b
|
symantecssl/core.py
|
symantecssl/core.py
|
from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
|
from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
|
Add a slightly higher level API for submitting an order
|
Add a slightly higher level API for submitting an order
|
Python
|
apache-2.0
|
glyph/symantecssl,chelseawinfree/symantecssl,cloudkeep/symantecssl,grigouze/symantecssl,jmvrbanac/symantecssl
|
python
|
## Code Before:
from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
## Instruction:
Add a slightly higher level API for submitting an order
## Code After:
from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
class Symantec(object):
def __init__(self, username, password,
url="https://api.geotrust.com/webtrust/partner"):
self.url = url
self.session = SymantecSession()
self.session.auth = SymantecAuth(username, password)
def submit(self, obj):
resp = self.session.post(self.url, obj.serialize())
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
|
// ... existing code ...
from __future__ import absolute_import, division, print_function
from .auth import SymantecAuth
from .order import Order
from .session import SymantecSession
// ... modified code ...
resp.raise_for_status()
return obj.response(resp.content)
def order(self, **kwargs):
obj = Order(**kwargs)
return self.submit(obj)
// ... rest of the code ...
|
c80fc3c31003e6ecec049ac2e1ca370e58ab2b3c
|
mediasync/processors/yuicompressor.py
|
mediasync/processors/yuicompressor.py
|
from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
Replace incorrect JS_MIMETYPES with CSS_MIMETYPES
|
Replace incorrect JS_MIMETYPES with CSS_MIMETYPES
|
Python
|
bsd-3-clause
|
sunlightlabs/django-mediasync,mntan/django-mediasync,mntan/django-mediasync,sunlightlabs/django-mediasync,sunlightlabs/django-mediasync,mntan/django-mediasync
|
python
|
## Code Before:
from django.conf import settings
from mediasync import JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
## Instruction:
Replace incorrect JS_MIMETYPES with CSS_MIMETYPES
## Code After:
from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
def _yui_path(settings):
if not hasattr(settings, 'MEDIASYNC'):
return None
path = settings.MEDIASYNC.get('YUI_COMPRESSOR_PATH', None)
if path:
path = os.path.realpath(os.path.expanduser(path))
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
def js_minifier(filedata, content_type, remote_path, is_active):
is_js = (content_type in JS_MIMETYPES or remote_path.lower().endswith('.js'))
yui_path = _yui_path(settings)
if is_js and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'js'], stdout=PIPE,
stderr=PIPE, stdin=PIPE)
stdout, stderr = proc.communicate(input=filedata)
return str(stdout)
|
# ... existing code ...
from django.conf import settings
from mediasync import CSS_MIMETYPES, JS_MIMETYPES
import os
from subprocess import Popen, PIPE
# ... modified code ...
return path
def css_minifier(filedata, content_type, remote_path, is_active):
is_css = (content_type in CSS_MIMETYPES or remote_path.lower().endswith('.css'))
yui_path = _yui_path(settings)
if is_css and yui_path and is_active:
proc = Popen(['java', '-jar', yui_path, '--type', 'css'], stdout=PIPE,
# ... rest of the code ...
|
bd4643e35a9c75d15bb6a4bfef63774fdd8bee5b
|
test/regress/cbrt.cpp.py
|
test/regress/cbrt.cpp.py
|
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
Add a typical 2-component case. Comment out a case that fail until integer support is fixed.
|
Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075
|
Python
|
lgpl-2.1
|
libsh-archive/sh,libsh-archive/sh,libsh-archive/sh,libsh-archive/sh,libsh-archive/sh,libsh-archive/sh
|
python
|
## Code Before:
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_make_test((3,), [(27,)], ['i', 'i'])
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
## Instruction:
Add a typical 2-component case.
Comment out a case that fail until integer support is fixed.
git-svn-id: f6f47f0a6375c1440c859a5b92b3b3fbb75bb58e@2508 afdca40c-03d6-0310-8ede-e9f093b21075
## Code After:
import shtest, sys, math
def cbrt(l, types=[]):
return shtest.make_test([math.pow(x, 1/3.0) for x in l], [l], types)
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output_header(sys.stdout)
test.output(sys.stdout, False)
# Test the cube root in immediate mode
test = shtest.ImmediateTest('cbrt_im', 1)
test.add_call(shtest.Call(shtest.Call.call, 'cbrt', 1))
insert_into(test)
test.output(sys.stdout, False)
test.output_footer(sys.stdout)
|
...
def insert_into(test):
test.add_test(cbrt((0.0, 1.0, 2.0, 3.0)))
test.add_test(cbrt((1.0,)))
test.add_test(cbrt((4000.2, 27)))
#test.add_make_test((3,), [(27,)], ['i', 'i']) # not currently working
# Test the cube root in stream programs
test = shtest.StreamTest('cbrt', 1)
...
|
99aabf10b091df07a023dbf638cf605c01db1d74
|
src/pcapi/utils/admin.py
|
src/pcapi/utils/admin.py
|
import argparse
import os
import shutil
from pcapi import get_resource
def create_skeleton(path):
if os.path.exists(path):
print 'Directory already exist'
return False
config_file = get_resource('pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
os.makedirs(os.path.join(path, 'logs'))
project_dir = os.path.abspath(path)
# copy the config file
shutil.copyfile(config_file, os.path.join(project_dir, 'pcapi.ini'))
return True
def parse_commandline():
# main parser
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='actions',
dest='action')
# runserver parser
subparsers.add_parser('runserver', help='run the pcapi server')
# create parser
create = subparsers.add_parser('create',
help='create the pcapi instance structure')
create.add_argument('path',
action='store',
help='instance path')
args = parser.parse_args()
if args.action == 'create':
if not create_skeleton(args.path):
return
elif args.action == 'runserver':
from pcapi.server import runserver
runserver()
if __name__ == '__main__':
parse_commandline()
|
import argparse
import os
import shutil
from pkg_resources import resource_filename
def create_skeleton(path):
if os.path.exists(path):
print 'Directory already exist'
return False
config_file = resource_filename('pcapi', 'data/pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
os.makedirs(os.path.join(path, 'logs'))
project_dir = os.path.abspath(path)
# copy the config file
shutil.copyfile(config_file, os.path.join(project_dir, 'pcapi.ini'))
return True
def parse_commandline():
# main parser
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='actions',
dest='action')
# runserver parser
subparsers.add_parser('runserver', help='run the pcapi server')
# create parser
create = subparsers.add_parser('create',
help='create the pcapi instance structure')
create.add_argument('path',
action='store',
help='instance path')
args = parser.parse_args()
if args.action == 'create':
if not create_skeleton(args.path):
return
elif args.action == 'runserver':
from pcapi.server import runserver
runserver()
if __name__ == '__main__':
parse_commandline()
|
Use the pkg api for reading the resources in the package
|
Use the pkg api for reading the resources in the package
Issue cobweb-eu/pcapi#18
|
Python
|
bsd-3-clause
|
cobweb-eu/pcapi,xmichael/pcapi,edina/pcapi,xmichael/pcapi,cobweb-eu/pcapi,edina/pcapi
|
python
|
## Code Before:
import argparse
import os
import shutil
from pcapi import get_resource
def create_skeleton(path):
if os.path.exists(path):
print 'Directory already exist'
return False
config_file = get_resource('pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
os.makedirs(os.path.join(path, 'logs'))
project_dir = os.path.abspath(path)
# copy the config file
shutil.copyfile(config_file, os.path.join(project_dir, 'pcapi.ini'))
return True
def parse_commandline():
# main parser
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='actions',
dest='action')
# runserver parser
subparsers.add_parser('runserver', help='run the pcapi server')
# create parser
create = subparsers.add_parser('create',
help='create the pcapi instance structure')
create.add_argument('path',
action='store',
help='instance path')
args = parser.parse_args()
if args.action == 'create':
if not create_skeleton(args.path):
return
elif args.action == 'runserver':
from pcapi.server import runserver
runserver()
if __name__ == '__main__':
parse_commandline()
## Instruction:
Use the pkg api for reading the resources in the package
Issue cobweb-eu/pcapi#18
## Code After:
import argparse
import os
import shutil
from pkg_resources import resource_filename
def create_skeleton(path):
if os.path.exists(path):
print 'Directory already exist'
return False
config_file = resource_filename('pcapi', 'data/pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
os.makedirs(os.path.join(path, 'logs'))
project_dir = os.path.abspath(path)
# copy the config file
shutil.copyfile(config_file, os.path.join(project_dir, 'pcapi.ini'))
return True
def parse_commandline():
# main parser
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='actions',
dest='action')
# runserver parser
subparsers.add_parser('runserver', help='run the pcapi server')
# create parser
create = subparsers.add_parser('create',
help='create the pcapi instance structure')
create.add_argument('path',
action='store',
help='instance path')
args = parser.parse_args()
if args.action == 'create':
if not create_skeleton(args.path):
return
elif args.action == 'runserver':
from pcapi.server import runserver
runserver()
if __name__ == '__main__':
parse_commandline()
|
...
import os
import shutil
from pkg_resources import resource_filename
def create_skeleton(path):
...
print 'Directory already exist'
return False
config_file = resource_filename('pcapi', 'data/pcapi.ini.example')
# create the folder structure
os.makedirs(os.path.join(path, 'data'))
...
|
6a5bd804b53eef5356ab55e0f836890995bb98d6
|
jupyter-lib/kotlin-jupyter-api-gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/jupyter/api/plugin/ApiGradlePlugin.kt
|
jupyter-lib/kotlin-jupyter-api-gradle-plugin/src/main/kotlin/org/jetbrains/kotlinx/jupyter/api/plugin/ApiGradlePlugin.kt
|
package org.jetbrains.kotlinx.jupyter.api.plugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.kotlin.dsl.invoke
import org.gradle.kotlin.dsl.register
import org.gradle.kotlin.dsl.repositories
import org.jetbrains.kotlin.gradle.internal.Kapt3GradleSubplugin
import org.jetbrains.kotlin.gradle.plugin.KaptExtension
import org.jetbrains.kotlinx.jupyter.api.plugin.tasks.JupyterApiResourcesTask
class ApiGradlePlugin : Plugin<Project> {
override fun apply(target: Project) {
target.pluginManager.run {
apply(Kapt3GradleSubplugin::class.java)
}
target.extensions.configure<KaptExtension>("kapt") {
arguments {
arg("kotlin.jupyter.fqn.path", target.buildDir.resolve(FQNS_PATH))
}
}
target.repositories {
mavenCentral()
}
val pluginExtension = KotlinJupyterPluginExtension(target)
target.extensions.add("kotlinJupyter", pluginExtension)
pluginExtension.addDependenciesIfNeeded()
target.tasks {
val resourcesTaskName = "processJupyterApiResources"
register<JupyterApiResourcesTask>(resourcesTaskName) {
dependsOn("kaptKotlin")
}
named("processResources") {
dependsOn(resourcesTaskName)
}
}
}
companion object {
const val FQNS_PATH = "generated/jupyter/fqns"
}
}
|
package org.jetbrains.kotlinx.jupyter.api.plugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.kotlin.dsl.invoke
import org.gradle.kotlin.dsl.register
import org.gradle.kotlin.dsl.repositories
import org.jetbrains.kotlin.gradle.internal.Kapt3GradleSubplugin
import org.jetbrains.kotlin.gradle.plugin.KaptExtension
import org.jetbrains.kotlinx.jupyter.api.plugin.tasks.JupyterApiResourcesTask
class ApiGradlePlugin : Plugin<Project> {
override fun apply(target: Project) {
target.pluginManager.run {
apply(Kapt3GradleSubplugin::class.java)
}
target.extensions.configure<KaptExtension>("kapt") {
arguments {
val fqnsPath = target.buildDir.resolve(FQNS_PATH)
if (fqnsPath.exists()) fqnsPath.deleteRecursively()
arg("kotlin.jupyter.fqn.path", fqnsPath)
}
}
target.repositories {
mavenCentral()
}
val pluginExtension = KotlinJupyterPluginExtension(target)
target.extensions.add("kotlinJupyter", pluginExtension)
pluginExtension.addDependenciesIfNeeded()
target.tasks {
val resourcesTaskName = "processJupyterApiResources"
register<JupyterApiResourcesTask>(resourcesTaskName) {
dependsOn("kaptKotlin")
}
named("processResources") {
dependsOn(resourcesTaskName)
}
}
}
companion object {
const val FQNS_PATH = "generated/jupyter/fqns"
}
}
|
Clear Gradle caches for plugin properly
|
Clear Gradle caches for plugin properly
|
Kotlin
|
apache-2.0
|
ligee/kotlin-jupyter
|
kotlin
|
## Code Before:
package org.jetbrains.kotlinx.jupyter.api.plugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.kotlin.dsl.invoke
import org.gradle.kotlin.dsl.register
import org.gradle.kotlin.dsl.repositories
import org.jetbrains.kotlin.gradle.internal.Kapt3GradleSubplugin
import org.jetbrains.kotlin.gradle.plugin.KaptExtension
import org.jetbrains.kotlinx.jupyter.api.plugin.tasks.JupyterApiResourcesTask
class ApiGradlePlugin : Plugin<Project> {
override fun apply(target: Project) {
target.pluginManager.run {
apply(Kapt3GradleSubplugin::class.java)
}
target.extensions.configure<KaptExtension>("kapt") {
arguments {
arg("kotlin.jupyter.fqn.path", target.buildDir.resolve(FQNS_PATH))
}
}
target.repositories {
mavenCentral()
}
val pluginExtension = KotlinJupyterPluginExtension(target)
target.extensions.add("kotlinJupyter", pluginExtension)
pluginExtension.addDependenciesIfNeeded()
target.tasks {
val resourcesTaskName = "processJupyterApiResources"
register<JupyterApiResourcesTask>(resourcesTaskName) {
dependsOn("kaptKotlin")
}
named("processResources") {
dependsOn(resourcesTaskName)
}
}
}
companion object {
const val FQNS_PATH = "generated/jupyter/fqns"
}
}
## Instruction:
Clear Gradle caches for plugin properly
## Code After:
package org.jetbrains.kotlinx.jupyter.api.plugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.kotlin.dsl.invoke
import org.gradle.kotlin.dsl.register
import org.gradle.kotlin.dsl.repositories
import org.jetbrains.kotlin.gradle.internal.Kapt3GradleSubplugin
import org.jetbrains.kotlin.gradle.plugin.KaptExtension
import org.jetbrains.kotlinx.jupyter.api.plugin.tasks.JupyterApiResourcesTask
class ApiGradlePlugin : Plugin<Project> {
override fun apply(target: Project) {
target.pluginManager.run {
apply(Kapt3GradleSubplugin::class.java)
}
target.extensions.configure<KaptExtension>("kapt") {
arguments {
val fqnsPath = target.buildDir.resolve(FQNS_PATH)
if (fqnsPath.exists()) fqnsPath.deleteRecursively()
arg("kotlin.jupyter.fqn.path", fqnsPath)
}
}
target.repositories {
mavenCentral()
}
val pluginExtension = KotlinJupyterPluginExtension(target)
target.extensions.add("kotlinJupyter", pluginExtension)
pluginExtension.addDependenciesIfNeeded()
target.tasks {
val resourcesTaskName = "processJupyterApiResources"
register<JupyterApiResourcesTask>(resourcesTaskName) {
dependsOn("kaptKotlin")
}
named("processResources") {
dependsOn(resourcesTaskName)
}
}
}
companion object {
const val FQNS_PATH = "generated/jupyter/fqns"
}
}
|
// ... existing code ...
target.extensions.configure<KaptExtension>("kapt") {
arguments {
val fqnsPath = target.buildDir.resolve(FQNS_PATH)
if (fqnsPath.exists()) fqnsPath.deleteRecursively()
arg("kotlin.jupyter.fqn.path", fqnsPath)
}
}
// ... rest of the code ...
|
785cbcb8ff92520e0e57fec3634353935b5b030a
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='depfinder',
version='0.0.1',
author='Eric Dill',
author_email='[email protected]',
py_modules=['depfinder'],
description='Find all the imports in your library',
url='http://github.com/ericdill/depfinder',
platforms='Cross platform (Linux, Mac OSX, Windows)',
install_requires=['stdlib_list', 'setuptools'],
)
|
from setuptools import setup
import depfinder
setup(name='depfinder',
version=depfinder.__version__,
author='Eric Dill',
author_email='[email protected]',
py_modules=['depfinder'],
description='Find all the imports in your library',
url='http://github.com/ericdill/depfinder',
platforms='Cross platform (Linux, Mac OSX, Windows)',
install_requires=['stdlib_list', 'setuptools'],
)
|
Use the version specified in depfinder.py
|
MNT: Use the version specified in depfinder.py
|
Python
|
bsd-3-clause
|
ericdill/depfinder
|
python
|
## Code Before:
from setuptools import setup
setup(name='depfinder',
version='0.0.1',
author='Eric Dill',
author_email='[email protected]',
py_modules=['depfinder'],
description='Find all the imports in your library',
url='http://github.com/ericdill/depfinder',
platforms='Cross platform (Linux, Mac OSX, Windows)',
install_requires=['stdlib_list', 'setuptools'],
)
## Instruction:
MNT: Use the version specified in depfinder.py
## Code After:
from setuptools import setup
import depfinder
setup(name='depfinder',
version=depfinder.__version__,
author='Eric Dill',
author_email='[email protected]',
py_modules=['depfinder'],
description='Find all the imports in your library',
url='http://github.com/ericdill/depfinder',
platforms='Cross platform (Linux, Mac OSX, Windows)',
install_requires=['stdlib_list', 'setuptools'],
)
|
...
from setuptools import setup
import depfinder
setup(name='depfinder',
version=depfinder.__version__,
author='Eric Dill',
author_email='[email protected]',
py_modules=['depfinder'],
...
|
32fd301acf36c262bf6b27945940438e232a03ec
|
src/main/java/valandur/webapi/cache/misc/CachedCatalogType.java
|
src/main/java/valandur/webapi/cache/misc/CachedCatalogType.java
|
package valandur.webapi.cache.misc;
import org.spongepowered.api.CatalogType;
import valandur.webapi.api.cache.CachedObject;
public class CachedCatalogType extends CachedObject<CatalogType> {
private String id;
public String getId() {
return id;
}
private String name;
public String getName() {
return name;
}
public CachedCatalogType(CatalogType catalogType) {
super(catalogType, false);
this.id = catalogType.getId();
this.name = catalogType.getName();
}
}
|
package valandur.webapi.cache.misc;
import org.spongepowered.api.CatalogType;
import org.spongepowered.api.text.translation.Translatable;
import valandur.webapi.api.cache.CachedObject;
public class CachedCatalogType extends CachedObject<CatalogType> {
private String id;
public String getId() {
return id;
}
private String name;
public String getName() {
return name;
}
public CachedCatalogType(CatalogType catalogType) {
super(catalogType, false);
this.id = catalogType.getId();
if (catalogType instanceof Translatable) {
this.name = ((Translatable)catalogType).getTranslation().get();
} else {
this.name = catalogType.getName();
}
}
}
|
Return translated name for catalog types when possible
|
fix(catalog-type): Return translated name for catalog types when possible
|
Java
|
mit
|
Valandur/Web-API,Valandur/Web-API,Valandur/Web-API
|
java
|
## Code Before:
package valandur.webapi.cache.misc;
import org.spongepowered.api.CatalogType;
import valandur.webapi.api.cache.CachedObject;
public class CachedCatalogType extends CachedObject<CatalogType> {
private String id;
public String getId() {
return id;
}
private String name;
public String getName() {
return name;
}
public CachedCatalogType(CatalogType catalogType) {
super(catalogType, false);
this.id = catalogType.getId();
this.name = catalogType.getName();
}
}
## Instruction:
fix(catalog-type): Return translated name for catalog types when possible
## Code After:
package valandur.webapi.cache.misc;
import org.spongepowered.api.CatalogType;
import org.spongepowered.api.text.translation.Translatable;
import valandur.webapi.api.cache.CachedObject;
public class CachedCatalogType extends CachedObject<CatalogType> {
private String id;
public String getId() {
return id;
}
private String name;
public String getName() {
return name;
}
public CachedCatalogType(CatalogType catalogType) {
super(catalogType, false);
this.id = catalogType.getId();
if (catalogType instanceof Translatable) {
this.name = ((Translatable)catalogType).getTranslation().get();
} else {
this.name = catalogType.getName();
}
}
}
|
// ... existing code ...
package valandur.webapi.cache.misc;
import org.spongepowered.api.CatalogType;
import org.spongepowered.api.text.translation.Translatable;
import valandur.webapi.api.cache.CachedObject;
public class CachedCatalogType extends CachedObject<CatalogType> {
// ... modified code ...
super(catalogType, false);
this.id = catalogType.getId();
if (catalogType instanceof Translatable) {
this.name = ((Translatable)catalogType).getTranslation().get();
} else {
this.name = catalogType.getName();
}
}
}
// ... rest of the code ...
|
d5a2a11d23b9f5393b0b39ca2f90978276311f52
|
app/slot/routes.py
|
app/slot/routes.py
|
from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/dashboard')
# @requires_auth
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms()
|
from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/')
@app.route('/dashboard')
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms()
|
Add / route to index. Remove old requires_auth decorator.
|
Add / route to index.
Remove old requires_auth decorator.
|
Python
|
mit
|
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
|
python
|
## Code Before:
from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/dashboard')
# @requires_auth
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms()
## Instruction:
Add / route to index.
Remove old requires_auth decorator.
## Code After:
from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/')
@app.route('/dashboard')
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms()
|
...
from flask.ext.login import login_required
@app.route('/')
@app.route('/dashboard')
@login_required
def index():
return con.index()
...
|
2fa43f43be5e84f0039db10aee4cd89e3f3b9001
|
src/main/java/gui/DrawableNode.java
|
src/main/java/gui/DrawableNode.java
|
package gui;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
/**
* Created by TUDelft SID on 17-5-2017.
*/
public class DrawableNode {
private static final int ARC_SIZE = 10;
private static GraphicsContext gc;
private int id;
private double xCoordinate;
private double yCoordinate;
private double width;
private double height;
private boolean highlighted;
public DrawableNode (int id, GraphicsContext gc) {
this.gc = gc;
this.id = id;
}
public void setCoordinates(double x, double y, double width, double height) {
this.xCoordinate = x;
this.yCoordinate = y;
this.width = width;
this.height = height;
}
public int getId() {
return this.id;
}
public void highlight() {
this.highlighted = true;
draw();
}
public void lowlight() {
this.highlighted = false;
draw();
}
public void draw() {
if (highlighted) {
gc.setFill(Color.ORANGE);
} else {
gc.setFill(Color.BLUE);
}
gc.fillRoundRect(xCoordinate, yCoordinate, width, height, ARC_SIZE, ARC_SIZE);
}
public boolean checkClick(double xEvent, double yEvent) {
return (xEvent > xCoordinate && xEvent < xCoordinate + width && yEvent > yCoordinate && yEvent < yCoordinate + height);
}
}
|
package gui;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
/**
* Created by TUDelft SID on 17-5-2017.
*/
public class DrawableNode {
private static final int ARC_SIZE = 10;
private static GraphicsContext gc;
private int id;
private double xCoordinate;
private double yCoordinate;
private double width;
private double height;
private boolean highlighted;
private boolean isDummy;
public DrawableNode (int id, GraphicsContext gc) {
this.gc = gc;
this.id = id;
}
public void setCoordinates(double x, double y, double width, double height) {
this.xCoordinate = x;
this.yCoordinate = y;
this.width = width;
this.height = height;
}
public int getId() {
return this.id;
}
public boolean isDummy() {
return isDummy;
}
public void setDummy(boolean dummy) {
isDummy = dummy;
}
public void highlight() {
this.highlighted = true;
draw();
}
public void lowlight() {
this.highlighted = false;
draw();
}
public void draw() {
if (highlighted) {
gc.setFill(Color.ORANGE);
} else {
gc.setFill(Color.BLUE);
}
gc.fillRoundRect(xCoordinate, yCoordinate, width, height, ARC_SIZE, ARC_SIZE);
}
public boolean checkClick(double xEvent, double yEvent) {
return (xEvent > xCoordinate && xEvent < xCoordinate + width && yEvent > yCoordinate && yEvent < yCoordinate + height);
}
}
|
Add dummy attribute to drawable node
|
Add dummy attribute to drawable node
|
Java
|
apache-2.0
|
ProgrammingLife2017/DynamiteAndButterflies
|
java
|
## Code Before:
package gui;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
/**
* Created by TUDelft SID on 17-5-2017.
*/
public class DrawableNode {
private static final int ARC_SIZE = 10;
private static GraphicsContext gc;
private int id;
private double xCoordinate;
private double yCoordinate;
private double width;
private double height;
private boolean highlighted;
public DrawableNode (int id, GraphicsContext gc) {
this.gc = gc;
this.id = id;
}
public void setCoordinates(double x, double y, double width, double height) {
this.xCoordinate = x;
this.yCoordinate = y;
this.width = width;
this.height = height;
}
public int getId() {
return this.id;
}
public void highlight() {
this.highlighted = true;
draw();
}
public void lowlight() {
this.highlighted = false;
draw();
}
public void draw() {
if (highlighted) {
gc.setFill(Color.ORANGE);
} else {
gc.setFill(Color.BLUE);
}
gc.fillRoundRect(xCoordinate, yCoordinate, width, height, ARC_SIZE, ARC_SIZE);
}
public boolean checkClick(double xEvent, double yEvent) {
return (xEvent > xCoordinate && xEvent < xCoordinate + width && yEvent > yCoordinate && yEvent < yCoordinate + height);
}
}
## Instruction:
Add dummy attribute to drawable node
## Code After:
package gui;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
/**
* Created by TUDelft SID on 17-5-2017.
*/
public class DrawableNode {
private static final int ARC_SIZE = 10;
private static GraphicsContext gc;
private int id;
private double xCoordinate;
private double yCoordinate;
private double width;
private double height;
private boolean highlighted;
private boolean isDummy;
public DrawableNode (int id, GraphicsContext gc) {
this.gc = gc;
this.id = id;
}
public void setCoordinates(double x, double y, double width, double height) {
this.xCoordinate = x;
this.yCoordinate = y;
this.width = width;
this.height = height;
}
public int getId() {
return this.id;
}
public boolean isDummy() {
return isDummy;
}
public void setDummy(boolean dummy) {
isDummy = dummy;
}
public void highlight() {
this.highlighted = true;
draw();
}
public void lowlight() {
this.highlighted = false;
draw();
}
public void draw() {
if (highlighted) {
gc.setFill(Color.ORANGE);
} else {
gc.setFill(Color.BLUE);
}
gc.fillRoundRect(xCoordinate, yCoordinate, width, height, ARC_SIZE, ARC_SIZE);
}
public boolean checkClick(double xEvent, double yEvent) {
return (xEvent > xCoordinate && xEvent < xCoordinate + width && yEvent > yCoordinate && yEvent < yCoordinate + height);
}
}
|
# ... existing code ...
private double width;
private double height;
private boolean highlighted;
private boolean isDummy;
public DrawableNode (int id, GraphicsContext gc) {
this.gc = gc;
# ... modified code ...
public int getId() {
return this.id;
}
public boolean isDummy() {
return isDummy;
}
public void setDummy(boolean dummy) {
isDummy = dummy;
}
public void highlight() {
# ... rest of the code ...
|
43c48cdbc5cf5793ad6f0f46cde5ca91ad2b8756
|
core/metautils/src/vectorLinkdef.h
|
core/metautils/src/vectorLinkdef.h
|
using namespace std;
#endif
#pragma create TClass vector<bool>;
#pragma create TClass vector<char>;
#pragma create TClass vector<short>;
#pragma create TClass vector<long>;
#pragma create TClass vector<unsigned char>;
#pragma create TClass vector<unsigned short>;
#pragma create TClass vector<unsigned int>;
#pragma create TClass vector<unsigned long>;
#pragma create TClass vector<float>;
#pragma create TClass vector<double>;
#pragma create TClass vector<char*>;
#pragma create TClass vector<const char*>;
#pragma create TClass vector<string>;
#if (!(G__GNUC==3 && G__GNUC_MINOR==1)) && !defined(G__KCC) && (!defined(G__VISUAL) || G__MSC_VER<1300)
// gcc3.1,3.2 has a problem with iterator<void*,...,void&>
#pragma create TClass vector<void*>;
#endif
|
using namespace std;
#endif
#pragma create TClass vector<bool>;
#pragma create TClass vector<char>;
#pragma create TClass vector<short>;
#pragma create TClass vector<long>;
#pragma create TClass vector<unsigned char>;
#pragma create TClass vector<unsigned short>;
#pragma create TClass vector<unsigned int>;
#pragma create TClass vector<unsigned long>;
#pragma create TClass vector<float>;
#pragma create TClass vector<double>;
#pragma create TClass vector<char*>;
#pragma create TClass vector<const char*>;
#pragma create TClass vector<string>;
#pragma create TClass vector<Long64_t>;
#pragma create TClass vector<ULong64_t>;
#if (!(G__GNUC==3 && G__GNUC_MINOR==1)) && !defined(G__KCC) && (!defined(G__VISUAL) || G__MSC_VER<1300)
// gcc3.1,3.2 has a problem with iterator<void*,...,void&>
#pragma create TClass vector<void*>;
#endif
|
Add missing TClass creation for vector<Long64_t> and vector<ULong64_t>
|
Add missing TClass creation for vector<Long64_t> and vector<ULong64_t>
git-svn-id: ecbadac9c76e8cf640a0bca86f6bd796c98521e3@38659 27541ba8-7e3a-0410-8455-c3a389f83636
|
C
|
lgpl-2.1
|
bbannier/ROOT,bbannier/ROOT,bbannier/ROOT,bbannier/ROOT,bbannier/ROOT,bbannier/ROOT,bbannier/ROOT
|
c
|
## Code Before:
using namespace std;
#endif
#pragma create TClass vector<bool>;
#pragma create TClass vector<char>;
#pragma create TClass vector<short>;
#pragma create TClass vector<long>;
#pragma create TClass vector<unsigned char>;
#pragma create TClass vector<unsigned short>;
#pragma create TClass vector<unsigned int>;
#pragma create TClass vector<unsigned long>;
#pragma create TClass vector<float>;
#pragma create TClass vector<double>;
#pragma create TClass vector<char*>;
#pragma create TClass vector<const char*>;
#pragma create TClass vector<string>;
#if (!(G__GNUC==3 && G__GNUC_MINOR==1)) && !defined(G__KCC) && (!defined(G__VISUAL) || G__MSC_VER<1300)
// gcc3.1,3.2 has a problem with iterator<void*,...,void&>
#pragma create TClass vector<void*>;
#endif
## Instruction:
Add missing TClass creation for vector<Long64_t> and vector<ULong64_t>
git-svn-id: ecbadac9c76e8cf640a0bca86f6bd796c98521e3@38659 27541ba8-7e3a-0410-8455-c3a389f83636
## Code After:
using namespace std;
#endif
#pragma create TClass vector<bool>;
#pragma create TClass vector<char>;
#pragma create TClass vector<short>;
#pragma create TClass vector<long>;
#pragma create TClass vector<unsigned char>;
#pragma create TClass vector<unsigned short>;
#pragma create TClass vector<unsigned int>;
#pragma create TClass vector<unsigned long>;
#pragma create TClass vector<float>;
#pragma create TClass vector<double>;
#pragma create TClass vector<char*>;
#pragma create TClass vector<const char*>;
#pragma create TClass vector<string>;
#pragma create TClass vector<Long64_t>;
#pragma create TClass vector<ULong64_t>;
#if (!(G__GNUC==3 && G__GNUC_MINOR==1)) && !defined(G__KCC) && (!defined(G__VISUAL) || G__MSC_VER<1300)
// gcc3.1,3.2 has a problem with iterator<void*,...,void&>
#pragma create TClass vector<void*>;
#endif
|
...
#pragma create TClass vector<char*>;
#pragma create TClass vector<const char*>;
#pragma create TClass vector<string>;
#pragma create TClass vector<Long64_t>;
#pragma create TClass vector<ULong64_t>;
#if (!(G__GNUC==3 && G__GNUC_MINOR==1)) && !defined(G__KCC) && (!defined(G__VISUAL) || G__MSC_VER<1300)
// gcc3.1,3.2 has a problem with iterator<void*,...,void&>
...
|
739860d8da3e3380e49283a1fca2c43750349909
|
setup.py
|
setup.py
|
from setuptools import setup
import mikla
setup(
name='mikla',
version=mikla.__version__.strip(),
url='http://dirtymonkey.co.uk/mikla',
license='MIT',
author=mikla.__author__.strip(),
author_email='[email protected]',
description=mikla.__doc__.strip().replace('\n', ' '),
long_description=open('README.rst').read(),
keywords='encryption security gnupg gpg',
packages=['mikla'],
include_package_data=True,
entry_points={
'console_scripts': [
'mikla = mikla.main:main',
],
},
install_requires=[
'docopt>=0.6.2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: BSD',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Security :: Cryptography',
'Topic :: Communications',
'Topic :: Utilities',
],
)
|
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info[:2] < (3, 5):
print('Mikla only runs on Python 3.5 or later', file=sys.stderr)
sys.exit(1)
import mikla
setup(
name='mikla',
version=mikla.__version__.strip(),
url='http://dirtymonkey.co.uk/mikla',
license='MIT',
author=mikla.__author__.strip(),
author_email='[email protected]',
description=mikla.__doc__.strip().replace('\n', ' '),
long_description=open('README.rst').read(),
keywords='encryption security gnupg gpg',
packages=['mikla'],
include_package_data=True,
entry_points={
'console_scripts': [
'mikla = mikla.main:main',
],
},
install_requires=[
'docopt>=0.6.2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: BSD',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Security :: Cryptography',
'Topic :: Communications',
'Topic :: Utilities',
],
)
|
Add helpful message for users trying to install with Python < 3.5
|
Add helpful message for users trying to install with Python < 3.5
|
Python
|
mit
|
Matt-Deacalion/Mikla
|
python
|
## Code Before:
from setuptools import setup
import mikla
setup(
name='mikla',
version=mikla.__version__.strip(),
url='http://dirtymonkey.co.uk/mikla',
license='MIT',
author=mikla.__author__.strip(),
author_email='[email protected]',
description=mikla.__doc__.strip().replace('\n', ' '),
long_description=open('README.rst').read(),
keywords='encryption security gnupg gpg',
packages=['mikla'],
include_package_data=True,
entry_points={
'console_scripts': [
'mikla = mikla.main:main',
],
},
install_requires=[
'docopt>=0.6.2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: BSD',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Security :: Cryptography',
'Topic :: Communications',
'Topic :: Utilities',
],
)
## Instruction:
Add helpful message for users trying to install with Python < 3.5
## Code After:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info[:2] < (3, 5):
print('Mikla only runs on Python 3.5 or later', file=sys.stderr)
sys.exit(1)
import mikla
setup(
name='mikla',
version=mikla.__version__.strip(),
url='http://dirtymonkey.co.uk/mikla',
license='MIT',
author=mikla.__author__.strip(),
author_email='[email protected]',
description=mikla.__doc__.strip().replace('\n', ' '),
long_description=open('README.rst').read(),
keywords='encryption security gnupg gpg',
packages=['mikla'],
include_package_data=True,
entry_points={
'console_scripts': [
'mikla = mikla.main:main',
],
},
install_requires=[
'docopt>=0.6.2',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: POSIX :: BSD',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Security :: Cryptography',
'Topic :: Communications',
'Topic :: Utilities',
],
)
|
# ... existing code ...
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info[:2] < (3, 5):
print('Mikla only runs on Python 3.5 or later', file=sys.stderr)
sys.exit(1)
import mikla
setup(
name='mikla',
# ... rest of the code ...
|
4ee689a4825a93cf6b0116b6b7343028c96b5cfb
|
bernard/discord_notifier.py
|
bernard/discord_notifier.py
|
"""A logging handler that emits to a Discord webhook."""
import requests
from logging import Handler
class DiscordHandler(Handler):
"""A logging handler that emits to a Discord webhook."""
def __init__(self, webhook, *args, **kwargs):
"""Initialize the DiscordHandler class."""
super().__init__(*args, **kwargs)
self.webhook = webhook
def emit(self, record):
"""Emit record to the Discord webhook."""
json = {'content': self.format(record)}
try:
requests.post(self.webhook, json=json)
except:
self.handleError(record)
|
"""A logging handler that emits to a Discord webhook."""
import requests
from logging import Handler
class DiscordHandler(Handler):
"""A logging handler that emits to a Discord webhook."""
def __init__(self, webhook, *args, **kwargs):
"""Initialize the DiscordHandler class."""
super().__init__(*args, **kwargs)
self.webhook = webhook
def emit(self, record):
"""Emit record to the Discord webhook."""
json = {'content': self.format(record)}
try:
requests.post(self.webhook, json=json)
except requests.RequestException:
self.handleError(record)
|
Fix bare 'except' in DiscordHandler
|
Fix bare 'except' in DiscordHandler
|
Python
|
mit
|
leviroth/bernard
|
python
|
## Code Before:
"""A logging handler that emits to a Discord webhook."""
import requests
from logging import Handler
class DiscordHandler(Handler):
"""A logging handler that emits to a Discord webhook."""
def __init__(self, webhook, *args, **kwargs):
"""Initialize the DiscordHandler class."""
super().__init__(*args, **kwargs)
self.webhook = webhook
def emit(self, record):
"""Emit record to the Discord webhook."""
json = {'content': self.format(record)}
try:
requests.post(self.webhook, json=json)
except:
self.handleError(record)
## Instruction:
Fix bare 'except' in DiscordHandler
## Code After:
"""A logging handler that emits to a Discord webhook."""
import requests
from logging import Handler
class DiscordHandler(Handler):
"""A logging handler that emits to a Discord webhook."""
def __init__(self, webhook, *args, **kwargs):
"""Initialize the DiscordHandler class."""
super().__init__(*args, **kwargs)
self.webhook = webhook
def emit(self, record):
"""Emit record to the Discord webhook."""
json = {'content': self.format(record)}
try:
requests.post(self.webhook, json=json)
except requests.RequestException:
self.handleError(record)
|
# ... existing code ...
json = {'content': self.format(record)}
try:
requests.post(self.webhook, json=json)
except requests.RequestException:
self.handleError(record)
# ... rest of the code ...
|
b2d2f4e4bde02570e51537d4db72cebcba63c1f5
|
malcolm/modules/builtin/parts/labelpart.py
|
malcolm/modules/builtin/parts/labelpart.py
|
from annotypes import Anno
from malcolm.core import Part, PartRegistrar, StringMeta
from ..infos import LabelInfo
from ..util import set_tags
with Anno("Initial value of Block label"):
ALabelValue = str
class LabelPart(Part):
"""Part representing a the title of the Block a GUI should display"""
def __init__(self, value=None):
# type: (ALabelValue) -> None
super(LabelPart, self).__init__("label")
meta = StringMeta("Label for the block")
set_tags(meta, writeable=True)
self.initial_value = value
self.attr = meta.create_attribute_model(self.initial_value)
def setup(self, registrar):
# type: (PartRegistrar) -> None
super(LabelPart, self).setup(registrar)
registrar.add_attribute_model(self.name, self.attr, self.set_label)
self.registrar.report(LabelInfo(self.initial_value))
def set_label(self, value, ts=None):
self.attr.set_value(value, ts=ts)
self.registrar.report(LabelInfo(value))
|
from annotypes import Anno
from malcolm.core import Part, PartRegistrar, StringMeta
from ..infos import LabelInfo
from ..util import set_tags
with Anno("Initial value of Block label"):
ALabelValue = str
class LabelPart(Part):
"""Part representing a the title of the Block a GUI should display"""
def __init__(self, value=None):
# type: (ALabelValue) -> None
super(LabelPart, self).__init__("label")
meta = StringMeta("Label for the block")
set_tags(meta, writeable=True)
self.initial_value = value
self.attr = meta.create_attribute_model(self.initial_value)
def _report(self):
self.registrar.report(LabelInfo(self.attr.value))
def setup(self, registrar):
# type: (PartRegistrar) -> None
super(LabelPart, self).setup(registrar)
registrar.add_attribute_model(self.name, self.attr, self.set_label)
self._report()
def set_label(self, value, ts=None):
self.attr.set_value(value, ts=ts)
self._report()
|
Fix LabelPart to always report the validated set value
|
Fix LabelPart to always report the validated set value
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
python
|
## Code Before:
from annotypes import Anno
from malcolm.core import Part, PartRegistrar, StringMeta
from ..infos import LabelInfo
from ..util import set_tags
with Anno("Initial value of Block label"):
ALabelValue = str
class LabelPart(Part):
"""Part representing a the title of the Block a GUI should display"""
def __init__(self, value=None):
# type: (ALabelValue) -> None
super(LabelPart, self).__init__("label")
meta = StringMeta("Label for the block")
set_tags(meta, writeable=True)
self.initial_value = value
self.attr = meta.create_attribute_model(self.initial_value)
def setup(self, registrar):
# type: (PartRegistrar) -> None
super(LabelPart, self).setup(registrar)
registrar.add_attribute_model(self.name, self.attr, self.set_label)
self.registrar.report(LabelInfo(self.initial_value))
def set_label(self, value, ts=None):
self.attr.set_value(value, ts=ts)
self.registrar.report(LabelInfo(value))
## Instruction:
Fix LabelPart to always report the validated set value
## Code After:
from annotypes import Anno
from malcolm.core import Part, PartRegistrar, StringMeta
from ..infos import LabelInfo
from ..util import set_tags
with Anno("Initial value of Block label"):
ALabelValue = str
class LabelPart(Part):
"""Part representing a the title of the Block a GUI should display"""
def __init__(self, value=None):
# type: (ALabelValue) -> None
super(LabelPart, self).__init__("label")
meta = StringMeta("Label for the block")
set_tags(meta, writeable=True)
self.initial_value = value
self.attr = meta.create_attribute_model(self.initial_value)
def _report(self):
self.registrar.report(LabelInfo(self.attr.value))
def setup(self, registrar):
# type: (PartRegistrar) -> None
super(LabelPart, self).setup(registrar)
registrar.add_attribute_model(self.name, self.attr, self.set_label)
self._report()
def set_label(self, value, ts=None):
self.attr.set_value(value, ts=ts)
self._report()
|
// ... existing code ...
self.initial_value = value
self.attr = meta.create_attribute_model(self.initial_value)
def _report(self):
self.registrar.report(LabelInfo(self.attr.value))
def setup(self, registrar):
# type: (PartRegistrar) -> None
super(LabelPart, self).setup(registrar)
registrar.add_attribute_model(self.name, self.attr, self.set_label)
self._report()
def set_label(self, value, ts=None):
self.attr.set_value(value, ts=ts)
self._report()
// ... rest of the code ...
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.