commit (stringlengths 40-40) | old_file (stringlengths 4-234) | new_file (stringlengths 4-234) | old_contents (stringlengths 10-3.01k) | new_contents (stringlengths 19-3.38k) | subject (stringlengths 16-736) | message (stringlengths 17-2.63k) | lang (stringclasses, 4 values) | license (stringclasses, 13 values) | repos (stringlengths 5-82.6k) | config (stringclasses, 4 values) | content (stringlengths 134-4.41k) | fuzzy_diff (stringlengths 29-3.44k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
cfab8acf28fd7aff0b2d355c9935b028376ea08e
|
sequence.h
|
sequence.h
|
typedef enum IterType_ {ITERLIST, ITERVECTOR} IterType;
typedef struct Iter_ {
IterType type;
union {
struct {
Object *node;
bool first;
} list;
struct {
Object *object;
int index;
} vec;
} u;
} Iter;
bool object_isseq(Object *object);
Iter seq_iter(Object *object);
Object *iter_next(Iter *iter);
int seq_len(Object *object);
Object *seq_nth(Object *object, int n);
#endif
|
typedef enum IterType_ {ITERLIST, ITERVECTOR} IterType;
typedef struct Iter_ {
IterType type;
union {
struct {
Object *node;
bool first;
} list;
struct {
Object *object;
int index;
} vec;
} u;
} Iter;
bool object_isseq(Object *object) __attribute__ ((pure));
Iter seq_iter(Object *object);
Object *iter_next(Iter *iter);
int seq_len(Object *object) __attribute__ ((pure));
Object *seq_nth(Object *object, int n) __attribute__ ((pure));
#endif
|
Mark some functions as pure
|
Mark some functions as pure
|
C
|
mit
|
ids1024/idslisp
|
c
|
## Code Before:
typedef enum IterType_ {ITERLIST, ITERVECTOR} IterType;
typedef struct Iter_ {
IterType type;
union {
struct {
Object *node;
bool first;
} list;
struct {
Object *object;
int index;
} vec;
} u;
} Iter;
bool object_isseq(Object *object);
Iter seq_iter(Object *object);
Object *iter_next(Iter *iter);
int seq_len(Object *object);
Object *seq_nth(Object *object, int n);
#endif
## Instruction:
Mark some functions as pure
## Code After:
typedef enum IterType_ {ITERLIST, ITERVECTOR} IterType;
typedef struct Iter_ {
IterType type;
union {
struct {
Object *node;
bool first;
} list;
struct {
Object *object;
int index;
} vec;
} u;
} Iter;
bool object_isseq(Object *object) __attribute__ ((pure));
Iter seq_iter(Object *object);
Object *iter_next(Iter *iter);
int seq_len(Object *object) __attribute__ ((pure));
Object *seq_nth(Object *object, int n) __attribute__ ((pure));
#endif
|
# ... existing code ...
} u;
} Iter;
bool object_isseq(Object *object) __attribute__ ((pure));
Iter seq_iter(Object *object);
Object *iter_next(Iter *iter);
int seq_len(Object *object) __attribute__ ((pure));
Object *seq_nth(Object *object, int n) __attribute__ ((pure));
#endif
# ... rest of the code ...
|
628f9edd7aefda1f9cf29cd5a3d04342877a5c38
|
custom/icds/rules/custom_actions.py
|
custom/icds/rules/custom_actions.py
|
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
def escalate_tech_issue(case, rule):
if case.type != 'tech_issue':
return CaseRuleActionResult()
escalated_ticket_level_map = {
'supervisor': 'block',
'block': 'district',
'district': 'state',
}
current_ticket_level = case.get_case_property('ticket_level')
if current_ticket_level not in escalated_ticket_level_map:
return CaseRuleActionResult()
escalated_ticket_level = escalated_ticket_level_map[current_ticket_level]
result = update_case(
case.domain,
case.case_id,
case_properties={'ticket_level': escalated_ticket_level},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
rule.log_submission(result[0].form_id)
return CaseRuleActionResult(num_updates=1)
|
import pytz
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
from corehq.util.timezones.conversions import ServerTime
from datetime import datetime
def escalate_tech_issue(case, rule):
if case.type != 'tech_issue':
return CaseRuleActionResult()
escalated_ticket_level_map = {
'supervisor': 'block',
'block': 'district',
'district': 'state',
}
escalated_location_id_map = {
'supervisor': case.get_case_property('block_location_id'),
'block': case.get_case_property('district_location_id'),
'district': case.get_case_property('state_location_id'),
}
current_ticket_level = case.get_case_property('ticket_level')
escalated_ticket_level = escalated_ticket_level_map.get(current_ticket_level)
escalated_location_id = escalated_location_id_map.get(current_ticket_level)
if not escalated_ticket_level or not escalated_location_id:
return CaseRuleActionResult()
today = ServerTime(datetime.utcnow()).user_time(pytz.timezone('Asia/Kolkata')).done().date()
result = update_case(
case.domain,
case.case_id,
case_properties={
'ticket_level': escalated_ticket_level,
'change_in_level': '1',
'touch_case_date': today.strftime('%Y-%m-%d'),
},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
rule.log_submission(result[0].form_id)
return CaseRuleActionResult(num_updates=1)
|
Add more properties to be updated
|
Add more properties to be updated
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
python
|
## Code Before:
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
def escalate_tech_issue(case, rule):
if case.type != 'tech_issue':
return CaseRuleActionResult()
escalated_ticket_level_map = {
'supervisor': 'block',
'block': 'district',
'district': 'state',
}
current_ticket_level = case.get_case_property('ticket_level')
if current_ticket_level not in escalated_ticket_level_map:
return CaseRuleActionResult()
escalated_ticket_level = escalated_ticket_level_map[current_ticket_level]
result = update_case(
case.domain,
case.case_id,
case_properties={'ticket_level': escalated_ticket_level},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
rule.log_submission(result[0].form_id)
return CaseRuleActionResult(num_updates=1)
## Instruction:
Add more properties to be updated
## Code After:
import pytz
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
from corehq.util.timezones.conversions import ServerTime
from datetime import datetime
def escalate_tech_issue(case, rule):
if case.type != 'tech_issue':
return CaseRuleActionResult()
escalated_ticket_level_map = {
'supervisor': 'block',
'block': 'district',
'district': 'state',
}
escalated_location_id_map = {
'supervisor': case.get_case_property('block_location_id'),
'block': case.get_case_property('district_location_id'),
'district': case.get_case_property('state_location_id'),
}
current_ticket_level = case.get_case_property('ticket_level')
escalated_ticket_level = escalated_ticket_level_map.get(current_ticket_level)
escalated_location_id = escalated_location_id_map.get(current_ticket_level)
if not escalated_ticket_level or not escalated_location_id:
return CaseRuleActionResult()
today = ServerTime(datetime.utcnow()).user_time(pytz.timezone('Asia/Kolkata')).done().date()
result = update_case(
case.domain,
case.case_id,
case_properties={
'ticket_level': escalated_ticket_level,
'change_in_level': '1',
'touch_case_date': today.strftime('%Y-%m-%d'),
},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
rule.log_submission(result[0].form_id)
return CaseRuleActionResult(num_updates=1)
|
...
import pytz
from corehq.apps.data_interfaces.models import CaseRuleActionResult, AUTO_UPDATE_XMLNS
from corehq.apps.hqcase.utils import update_case
from corehq.util.timezones.conversions import ServerTime
from datetime import datetime
def escalate_tech_issue(case, rule):
...
'district': 'state',
}
escalated_location_id_map = {
'supervisor': case.get_case_property('block_location_id'),
'block': case.get_case_property('district_location_id'),
'district': case.get_case_property('state_location_id'),
}
current_ticket_level = case.get_case_property('ticket_level')
escalated_ticket_level = escalated_ticket_level_map.get(current_ticket_level)
escalated_location_id = escalated_location_id_map.get(current_ticket_level)
if not escalated_ticket_level or not escalated_location_id:
return CaseRuleActionResult()
today = ServerTime(datetime.utcnow()).user_time(pytz.timezone('Asia/Kolkata')).done().date()
result = update_case(
case.domain,
case.case_id,
case_properties={
'ticket_level': escalated_ticket_level,
'change_in_level': '1',
'touch_case_date': today.strftime('%Y-%m-%d'),
},
close=False,
xmlns=AUTO_UPDATE_XMLNS,
)
...
|
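The escalation logic above hinges on two parallel lookup tables queried with `dict.get`, bailing out when either the next ticket level or the matching location id is missing, plus an India-local date stamp. A minimal standalone sketch of that pattern, with hypothetical property names and plain `pytz` standing in for CommCare's `ServerTime` helper:

```python
from datetime import datetime

import pytz  # third-party, assumed installed

ESCALATION = {'supervisor': 'block', 'block': 'district', 'district': 'state'}

def next_level(current_level, location_ids):
    """Return (next_level, next_location_id), or None when escalation is impossible."""
    new_level = ESCALATION.get(current_level)
    new_location = location_ids.get(new_level)
    if not new_level or not new_location:
        return None
    return new_level, new_location

def india_today():
    """Today's date in Asia/Kolkata, mirroring the ServerTime conversion above."""
    return datetime.now(pytz.timezone('Asia/Kolkata')).date()

print(next_level('block', {'district': 'loc-123'}))  # ('district', 'loc-123')
print(next_level('state', {}))                       # None: already at the top level
print(india_today().strftime('%Y-%m-%d'))
```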
9349adb2efa5f0242cf9250d74d714a7e6aea1e9
|
ordination/__init__.py
|
ordination/__init__.py
|
from .base import CA, RDA, CCA
__all__ = ['CA', 'RDA', 'CCA']
#
#from numpy.testing import Tester
#test = Tester().test
__version__ = '0.1-dev'
|
from .base import CA, RDA, CCA
__all__ = ['CA', 'RDA', 'CCA']
#
#from numpy.testing import Tester
#test = Tester().test
# Compatible with PEP386
__version__ = '0.1.dev'
|
Make version compatible with PEP386
|
MAINT: Make version compatible with PEP386
|
Python
|
bsd-3-clause
|
xguse/scikit-bio,wdwvt1/scikit-bio,johnchase/scikit-bio,xguse/scikit-bio,colinbrislawn/scikit-bio,Achuth17/scikit-bio,Achuth17/scikit-bio,jdrudolph/scikit-bio,Kleptobismol/scikit-bio,jensreeder/scikit-bio,Jorge-C/bipy,jairideout/scikit-bio,kdmurray91/scikit-bio,averagehat/scikit-bio,wdwvt1/scikit-bio,Kleptobismol/scikit-bio,corburn/scikit-bio,anderspitman/scikit-bio,johnchase/scikit-bio,SamStudio8/scikit-bio,averagehat/scikit-bio,anderspitman/scikit-bio,gregcaporaso/scikit-bio,jensreeder/scikit-bio,corburn/scikit-bio,colinbrislawn/scikit-bio,demis001/scikit-bio,Kleptobismol/scikit-bio,jdrudolph/scikit-bio,jairideout/scikit-bio,kdmurray91/scikit-bio,gregcaporaso/scikit-bio,demis001/scikit-bio,SamStudio8/scikit-bio
|
python
|
## Code Before:
from .base import CA, RDA, CCA
__all__ = ['CA', 'RDA', 'CCA']
#
#from numpy.testing import Tester
#test = Tester().test
__version__ = '0.1-dev'
## Instruction:
MAINT: Make version compatible with PEP386
## Code After:
from .base import CA, RDA, CCA
__all__ = ['CA', 'RDA', 'CCA']
#
#from numpy.testing import Tester
#test = Tester().test
# Compatible with PEP386
__version__ = '0.1.dev'
|
# ... existing code ...
#from numpy.testing import Tester
#test = Tester().test
# Compatible with PEP386
__version__ = '0.1.dev'
# ... rest of the code ...
|
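The point of switching from `'0.1-dev'` to `'0.1.dev'` is that packaging tools recognise the dotted dev suffix as a pre-release that sorts before the final release. A quick check with the modern `packaging` library (an assumption here; the commit itself targets the older PEP 386 rules, but the behaviour carries over to PEP 440):

```python
from packaging.version import Version  # pip install packaging

dev = Version('0.1.dev')    # normalised to 0.1.dev0
final = Version('0.1')

print(dev)                  # 0.1.dev0
print(dev.is_prerelease)    # True
print(dev < final)          # True: dev releases order before the release itself
```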
f033a95cf9f14efdba544d354020d0b5e12ed11d
|
opendaylight/md-sal/sal-common-util/src/main/java/org/opendaylight/controller/sal/common/util/Arguments.java
|
opendaylight/md-sal/sal-common-util/src/main/java/org/opendaylight/controller/sal/common/util/Arguments.java
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.common.util;
public final class Arguments {
private Arguments() {
throw new UnsupportedOperationException("Utility class");
}
/**
* Checks if value is instance of provided class
*
*
* @param value Value to check
* @param type Type to check
* @return Reference which was checked
*/
@SuppressWarnings("unchecked")
public static <T> T checkInstanceOf(Object value, Class<T> type) {
if(!type.isInstance(value))
throw new IllegalArgumentException(String.format("Value %s is not of type %s", value, type));
return (T) value;
}
}
|
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.common.util;
public final class Arguments {
private Arguments() {
throw new UnsupportedOperationException("Utility class");
}
/**
* Checks if value is instance of provided class
*
*
* @param value Value to check
* @param type Type to check
* @return Reference which was checked
*/
@SuppressWarnings("unchecked")
public static <T> T checkInstanceOf(Object value, Class<T> type) {
if(!type.isInstance(value)) {
throw new IllegalArgumentException(String.format("Value %s is not of type %s", value, type));
}
return (T) value;
}
}
|
Fix checkstyle if-statements must use braces sal-common-util
|
Fix checkstyle if-statements must use braces sal-common-util
Change-Id: I518b9fa156af55c080d7e6a55067deab2c789a42
Signed-off-by: Thanh Ha <[email protected]>
|
Java
|
epl-1.0
|
mandeepdhami/controller,Johnson-Chou/test,mandeepdhami/controller,inocybe/odl-controller,tx1103mark/controller,inocybe/odl-controller,my76128/controller,aryantaheri/monitoring-controller,my76128/controller,mandeepdhami/controller,tx1103mark/controller,aryantaheri/monitoring-controller,522986491/controller,aryantaheri/monitoring-controller,aryantaheri/monitoring-controller,522986491/controller,my76128/controller,Johnson-Chou/test,Sushma7785/OpenDayLight-Load-Balancer,my76128/controller,mandeepdhami/controller,tx1103mark/controller,tx1103mark/controller,opendaylight/controller,Sushma7785/OpenDayLight-Load-Balancer
|
java
|
## Code Before:
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.common.util;
public final class Arguments {
private Arguments() {
throw new UnsupportedOperationException("Utility class");
}
/**
* Checks if value is instance of provided class
*
*
* @param value Value to check
* @param type Type to check
* @return Reference which was checked
*/
@SuppressWarnings("unchecked")
public static <T> T checkInstanceOf(Object value, Class<T> type) {
if(!type.isInstance(value))
throw new IllegalArgumentException(String.format("Value %s is not of type %s", value, type));
return (T) value;
}
}
## Instruction:
Fix checkstyle if-statements must use braces sal-common-util
Change-Id: I518b9fa156af55c080d7e6a55067deab2c789a42
Signed-off-by: Thanh Ha <[email protected]>
## Code After:
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.common.util;
public final class Arguments {
private Arguments() {
throw new UnsupportedOperationException("Utility class");
}
/**
* Checks if value is instance of provided class
*
*
* @param value Value to check
* @param type Type to check
* @return Reference which was checked
*/
@SuppressWarnings("unchecked")
public static <T> T checkInstanceOf(Object value, Class<T> type) {
if(!type.isInstance(value)) {
throw new IllegalArgumentException(String.format("Value %s is not of type %s", value, type));
}
return (T) value;
}
}
|
# ... existing code ...
*/
@SuppressWarnings("unchecked")
public static <T> T checkInstanceOf(Object value, Class<T> type) {
if(!type.isInstance(value)) {
throw new IllegalArgumentException(String.format("Value %s is not of type %s", value, type));
}
return (T) value;
}
}
# ... rest of the code ...
|
22bca7f50c9db97aaa79c1199b385b0f59968328
|
client/tests/tests_msgpack.py
|
client/tests/tests_msgpack.py
|
import unittest
from msgpack import *
class MessagePackTestCase(unittest.TestCase):
"""
This is not really a comprehensive test suite for messagepack but instead a
way to learn how to use the api.
"""
def test_can_pack_fixarray(self):
"""
Checks that we can pack a fix array (len(array) < 16).
"""
data = [1,2,3]
expected = bytes([0x93, 1, 2, 3])
self.assertEqual(expected, packb(data))
def test_can_pack_bytes(self):
"""
Checks that we can use binary types. By default msgpack uses str types
for bytes() so we need to use a Packer object correctly configured.
"""
packer = Packer(use_bin_type=True)
data = bytes([0, 1, 2, 3])
# Format is 0xc4, length, data
expected = bytes([0xc4, 4, 0, 1, 2, 3])
self.assertEqual(expected, packer.pack(data))
|
import unittest
from msgpack import *
class MessagePackTestCase(unittest.TestCase):
"""
This is not really a comprehensive test suite for messagepack but instead a
way to learn how to use the api.
"""
def test_can_pack_fixarray(self):
"""
Checks that we can pack a fix array (len(array) < 16).
"""
data = [1,2,3]
expected = bytes([0x93, 1, 2, 3])
self.assertEqual(expected, packb(data))
def test_can_pack_bytes(self):
"""
Checks that we can use binary types. By default msgpack uses str types
for bytes() so we need to use a Packer object correctly configured.
"""
packer = Packer(use_bin_type=True)
data = bytes([0, 1, 2, 3])
# Format is 0xc4, length, data
expected = bytes([0xc4, 4, 0, 1, 2, 3])
self.assertEqual(expected, packer.pack(data))
def test_can_unpack_multiple_values(self):
"""
Checks that we can unpack a stream of value as used in the command format.
"""
packer = Packer(use_bin_type=True)
# Creates command stream
data = packb(1) + packb([1,2,3])
# Stream deserializes it
a = Unpacker()
a.feed(data)
self.assertEqual(list(a), [1, [1,2,3]])
|
Add an example showing stream unpacking
|
Add an example showing stream unpacking
|
Python
|
bsd-2-clause
|
cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader
|
python
|
## Code Before:
import unittest
from msgpack import *
class MessagePackTestCase(unittest.TestCase):
"""
This is not really a comprehensive test suite for messagepack but instead a
way to learn how to use the api.
"""
def test_can_pack_fixarray(self):
"""
Checks that we can pack a fix array (len(array) < 16).
"""
data = [1,2,3]
expected = bytes([0x93, 1, 2, 3])
self.assertEqual(expected, packb(data))
def test_can_pack_bytes(self):
"""
Checks that we can use binary types. By default msgpack uses str types
for bytes() so we need to use a Packer object correctly configured.
"""
packer = Packer(use_bin_type=True)
data = bytes([0, 1, 2, 3])
# Format is 0xc4, length, data
expected = bytes([0xc4, 4, 0, 1, 2, 3])
self.assertEqual(expected, packer.pack(data))
## Instruction:
Add an example showing stream unpacking
## Code After:
import unittest
from msgpack import *
class MessagePackTestCase(unittest.TestCase):
"""
This is not really a comprehensive test suite for messagepack but instead a
way to learn how to use the api.
"""
def test_can_pack_fixarray(self):
"""
Checks that we can pack a fix array (len(array) < 16).
"""
data = [1,2,3]
expected = bytes([0x93, 1, 2, 3])
self.assertEqual(expected, packb(data))
def test_can_pack_bytes(self):
"""
Checks that we can use binary types. By default msgpack uses str types
for bytes() so we need to use a Packer object correctly configured.
"""
packer = Packer(use_bin_type=True)
data = bytes([0, 1, 2, 3])
# Format is 0xc4, length, data
expected = bytes([0xc4, 4, 0, 1, 2, 3])
self.assertEqual(expected, packer.pack(data))
def test_can_unpack_multiple_values(self):
"""
Checks that we can unpack a stream of value as used in the command format.
"""
packer = Packer(use_bin_type=True)
# Creates command stream
data = packb(1) + packb([1,2,3])
# Stream deserializes it
a = Unpacker()
a.feed(data)
self.assertEqual(list(a), [1, [1,2,3]])
|
# ... existing code ...
# Format is 0xc4, length, data
expected = bytes([0xc4, 4, 0, 1, 2, 3])
self.assertEqual(expected, packer.pack(data))
def test_can_unpack_multiple_values(self):
"""
Checks that we can unpack a stream of value as used in the command format.
"""
packer = Packer(use_bin_type=True)
# Creates command stream
data = packb(1) + packb([1,2,3])
# Stream deserializes it
a = Unpacker()
a.feed(data)
self.assertEqual(list(a), [1, [1,2,3]])
# ... rest of the code ...
|
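The new test feeds the whole byte string to the `Unpacker` in one go. The same API is designed for data that arrives in arbitrary chunks, which is the real point of stream unpacking; a small sketch of that (the two-byte chunk size is arbitrary):

```python
import msgpack

stream = msgpack.packb(1) + msgpack.packb([1, 2, 3])

unpacker = msgpack.Unpacker()
for i in range(0, len(stream), 2):   # pretend the bytes trickle in two at a time
    unpacker.feed(stream[i:i + 2])
    for obj in unpacker:             # yields only fully decoded objects
        print(obj)
# Prints 1 and then [1, 2, 3], no matter how the stream was chunked.
```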
6820de9ccdb7cc7263142108881cf98aab85adb1
|
space-age/space_age.py
|
space-age/space_age.py
|
class SpaceAge(object):
"""docstring for SpaceAge."""
def __init__(self, _seconds):
self._seconds = _seconds
def on_earth(self):
return round((self._seconds / 31557600), 2)
def on_mercury(self):
return round((self._seconds / 31557600) * 0.240846, 2)
obj = SpaceAge(1e6)
print (obj.on_earth())
print (obj.on_mercury())
|
class SpaceAge(object):
"""docstring for SpaceAge."""
def __init__(self, _seconds):
self._seconds = _seconds
def on_earth(self):
return round((self._seconds / 31557600), 2)
def on_mercury(self):
planet = self.on_earth() * 0.2408467
return planet
def on_venus(self):
planet = self.on_earth() * 0.61519726
return planet
def on_mars(self):
planet = self.on_earth() * 1.8808158
return planet
def on_jupiter(self):
planet = self.on_earth() * 11.862615
return planet
def on_saturn(self):
planet = self.on_earth() * 29.447498
return planet
def on_uranus(self):
planet = self.on_earth() * 84.016846
return planet
def on_neptune(self):
planet = self.on_earth() * 164.79132
return planet
obj = SpaceAge(1e6)
print (obj.on_earth())
print (obj.on_mercury())
|
Add other planets age function
|
Add other planets age function
|
Python
|
mit
|
amalshehu/exercism-python
|
python
|
## Code Before:
class SpaceAge(object):
"""docstring for SpaceAge."""
def __init__(self, _seconds):
self._seconds = _seconds
def on_earth(self):
return round((self._seconds / 31557600), 2)
def on_mercury(self):
return round((self._seconds / 31557600) * 0.240846, 2)
obj = SpaceAge(1e6)
print (obj.on_earth())
print (obj.on_mercury())
## Instruction:
Add other planets age function
## Code After:
class SpaceAge(object):
"""docstring for SpaceAge."""
def __init__(self, _seconds):
self._seconds = _seconds
def on_earth(self):
return round((self._seconds / 31557600), 2)
def on_mercury(self):
planet = self.on_earth() * 0.2408467
return planet
def on_venus(self):
planet = self.on_earth() * 0.61519726
return planet
def on_mars(self):
planet = self.on_earth() * 1.8808158
return planet
def on_jupiter(self):
planet = self.on_earth() * 11.862615
return planet
def on_saturn(self):
planet = self.on_earth() * 29.447498
return planet
def on_uranus(self):
planet = self.on_earth() * 84.016846
return planet
def on_neptune(self):
planet = self.on_earth() * 164.79132
return planet
obj = SpaceAge(1e6)
print (obj.on_earth())
print (obj.on_mercury())
|
// ... existing code ...
return round((self._seconds / 31557600), 2)
def on_mercury(self):
planet = self.on_earth() * 0.2408467
return planet
def on_venus(self):
planet = self.on_earth() * 0.61519726
return planet
def on_mars(self):
planet = self.on_earth() * 1.8808158
return planet
def on_jupiter(self):
planet = self.on_earth() * 11.862615
return planet
def on_saturn(self):
planet = self.on_earth() * 29.447498
return planet
def on_uranus(self):
planet = self.on_earth() * 84.016846
return planet
def on_neptune(self):
planet = self.on_earth() * 164.79132
return planet
obj = SpaceAge(1e6)
print (obj.on_earth())
// ... rest of the code ...
|
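The new version repeats one nearly identical method per planet, each multiplying the Earth years by a constant. Not the author's code, but for comparison, the same behaviour can be driven from a single ratio table (constants copied from the commit; the `age_on` helper is a hypothetical name):

```python
EARTH_YEAR_SECONDS = 31557600

ORBITAL_RATIOS = {
    'earth': 1.0, 'mercury': 0.2408467, 'venus': 0.61519726,
    'mars': 1.8808158, 'jupiter': 11.862615, 'saturn': 29.447498,
    'uranus': 84.016846, 'neptune': 164.79132,
}

def age_on(planet, seconds):
    """Age in the given planet's years, rounded the same way as on_earth()."""
    earth_years = round(seconds / EARTH_YEAR_SECONDS, 2)
    return earth_years * ORBITAL_RATIOS[planet]

print(age_on('earth', 1e6))    # 0.03
print(age_on('mercury', 1e6))  # 0.03 * 0.2408467
```

Keeping the constants in one table avoids adding a method for every planet, at the cost of the explicit per-planet API the original class exposes.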
61d71b27111f255c3dad3f974e6c7e0ace0c2ce9
|
karld/iter_utils.py
|
karld/iter_utils.py
|
from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
|
from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
|
Remove grouper and grouper based batchers
|
Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object.
|
Python
|
apache-2.0
|
johnwlockwood/stream_tap,johnwlockwood/karl_data,johnwlockwood/iter_karld_tools,johnwlockwood/stream_tap
|
python
|
## Code Before:
from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
## Instruction:
Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object.
## Code After:
from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
|
// ... existing code ...
from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
// ... modified code ...
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
// ... rest of the code ...
|
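The commit keeps only the `islice`-based `i_batch` and drops the `izip_longest` grouper, precisely because `islice` never needs a sentinel fill value for the short final batch. A Python 3 sketch of the surviving pattern (the original module is Python 2 and also uses `imap` elsewhere):

```python
from itertools import islice

def i_batch(max_size, iterable):
    """Yield tuples of up to max_size items, consuming the iterable lazily."""
    iterator = iter(iterable)
    while True:
        batch = tuple(islice(iterator, max_size))
        if not batch:
            break
        yield batch

print(list(i_batch(3, range(8))))
# [(0, 1, 2), (3, 4, 5), (6, 7)]: the last batch is simply short, no fill value to strip
```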
a1bcb99691f5a0238f6a34a5579df3e89e8d6823
|
child_sync_gp/model/project_compassion.py
|
child_sync_gp/model/project_compassion.py
|
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
Fix bug in write project.
|
Fix bug in write project.
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland
|
python
|
## Code Before:
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
## Instruction:
Fix bug in write project.
## Code After:
from openerp.osv import orm
from . import gp_connector
class project_compassion(orm.Model):
_inherit = 'compassion.project'
def write(self, cr, uid, ids, vals, context=None):
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
return res
|
...
"""Update Project in GP."""
res = super(project_compassion, self).write(cr, uid, ids, vals,
context)
if not isinstance(ids, list):
ids = [ids]
gp_connect = gp_connector.GPConnect()
for project in self.browse(cr, uid, ids, context):
gp_connect.upsert_project(uid, project)
...
|
dfd4a6f6b23447538b2b22da11666f5218d791db
|
mots_vides/constants.py
|
mots_vides/constants.py
|
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
|
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
LANGUAGE_CODES = {
'af': 'afrikaans',
'ar': 'arabic',
'az': 'azerbaijani',
'bg': 'bulgarian',
'be': 'belarusian',
'bn': 'bengali',
'br': 'breton',
'bs': 'bosnian',
'ca': 'catalan',
'cs': 'czech',
'cy': 'welsh',
'da': 'danish',
'de': 'german',
'el': 'greek',
'en': 'english',
'eo': 'esperanto',
'es': 'spanish',
'et': 'estonian',
'eu': 'basque',
'fa': 'persian',
'fi': 'finnish',
'fr': 'french',
'fy': 'frisian',
'ga': 'irish',
'gl': 'galician',
'he': 'hebrew',
'hi': 'hindi',
'hr': 'croatian',
'hu': 'hungarian',
'ia': 'interlingua',
'id': 'indonesian',
'io': 'ido',
'is': 'icelandic',
'it': 'italian',
'ja': 'japanese',
'ka': 'georgian',
'kk': 'kazakh',
'km': 'khmer',
'kn': 'kannada',
'ko': 'korean',
'lb': 'luxembourgish',
'lt': 'lithuanian',
'lv': 'latvian',
'mk': 'macedonian',
'ml': 'malayalam',
'mn': 'mongolian',
'mr': 'marathi',
'my': 'burmese',
'nb': 'norwegian',
'ne': 'nepali',
'nl': 'dutch',
'os': 'ossetic',
'pa': 'punjabi',
'pl': 'polish',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'sk': 'slovak',
'sl': 'slovenian',
'sq': 'albanian',
'sr': 'serbian',
'sv': 'swedish',
'sw': 'swahili',
'ta': 'tamil',
'te': 'telugu',
'th': 'thai',
'tr': 'turkish',
'tt': 'tatar',
'uk': 'ukrainian',
'ur': 'urdu',
'vi': 'vietnamese',
'zh': 'chinese',
}
|
Define a complete list of language code, for easy future maintenance
|
Define a complete list of language code, for easy future maintenance
|
Python
|
bsd-3-clause
|
Fantomas42/mots-vides,Fantomas42/mots-vides
|
python
|
## Code Before:
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
## Instruction:
Define a complete list of language code, for easy future maintenance
## Code After:
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
LANGUAGE_CODES = {
'af': 'afrikaans',
'ar': 'arabic',
'az': 'azerbaijani',
'bg': 'bulgarian',
'be': 'belarusian',
'bn': 'bengali',
'br': 'breton',
'bs': 'bosnian',
'ca': 'catalan',
'cs': 'czech',
'cy': 'welsh',
'da': 'danish',
'de': 'german',
'el': 'greek',
'en': 'english',
'eo': 'esperanto',
'es': 'spanish',
'et': 'estonian',
'eu': 'basque',
'fa': 'persian',
'fi': 'finnish',
'fr': 'french',
'fy': 'frisian',
'ga': 'irish',
'gl': 'galician',
'he': 'hebrew',
'hi': 'hindi',
'hr': 'croatian',
'hu': 'hungarian',
'ia': 'interlingua',
'id': 'indonesian',
'io': 'ido',
'is': 'icelandic',
'it': 'italian',
'ja': 'japanese',
'ka': 'georgian',
'kk': 'kazakh',
'km': 'khmer',
'kn': 'kannada',
'ko': 'korean',
'lb': 'luxembourgish',
'lt': 'lithuanian',
'lv': 'latvian',
'mk': 'macedonian',
'ml': 'malayalam',
'mn': 'mongolian',
'mr': 'marathi',
'my': 'burmese',
'nb': 'norwegian',
'ne': 'nepali',
'nl': 'dutch',
'os': 'ossetic',
'pa': 'punjabi',
'pl': 'polish',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'sk': 'slovak',
'sl': 'slovenian',
'sq': 'albanian',
'sr': 'serbian',
'sv': 'swedish',
'sw': 'swahili',
'ta': 'tamil',
'te': 'telugu',
'th': 'thai',
'tr': 'turkish',
'tt': 'tatar',
'uk': 'ukrainian',
'ur': 'urdu',
'vi': 'vietnamese',
'zh': 'chinese',
}
|
# ... existing code ...
os.path.abspath(__file__)),
'datas/'
)
LANGUAGE_CODES = {
'af': 'afrikaans',
'ar': 'arabic',
'az': 'azerbaijani',
'bg': 'bulgarian',
'be': 'belarusian',
'bn': 'bengali',
'br': 'breton',
'bs': 'bosnian',
'ca': 'catalan',
'cs': 'czech',
'cy': 'welsh',
'da': 'danish',
'de': 'german',
'el': 'greek',
'en': 'english',
'eo': 'esperanto',
'es': 'spanish',
'et': 'estonian',
'eu': 'basque',
'fa': 'persian',
'fi': 'finnish',
'fr': 'french',
'fy': 'frisian',
'ga': 'irish',
'gl': 'galician',
'he': 'hebrew',
'hi': 'hindi',
'hr': 'croatian',
'hu': 'hungarian',
'ia': 'interlingua',
'id': 'indonesian',
'io': 'ido',
'is': 'icelandic',
'it': 'italian',
'ja': 'japanese',
'ka': 'georgian',
'kk': 'kazakh',
'km': 'khmer',
'kn': 'kannada',
'ko': 'korean',
'lb': 'luxembourgish',
'lt': 'lithuanian',
'lv': 'latvian',
'mk': 'macedonian',
'ml': 'malayalam',
'mn': 'mongolian',
'mr': 'marathi',
'my': 'burmese',
'nb': 'norwegian',
'ne': 'nepali',
'nl': 'dutch',
'os': 'ossetic',
'pa': 'punjabi',
'pl': 'polish',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'sk': 'slovak',
'sl': 'slovenian',
'sq': 'albanian',
'sr': 'serbian',
'sv': 'swedish',
'sw': 'swahili',
'ta': 'tamil',
'te': 'telugu',
'th': 'thai',
'tr': 'turkish',
'tt': 'tatar',
'uk': 'ukrainian',
'ur': 'urdu',
'vi': 'vietnamese',
'zh': 'chinese',
}
# ... rest of the code ...
|
68722936310358dbba2342366e72f0777c2d0ab5
|
clacksd/src/transport-server/cl_discovery_thread.c
|
clacksd/src/transport-server/cl_discovery_thread.c
|
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
continue;
}
}
|
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
struct CL_Discovery_Transport *discovered_transport = malloc(sizeof(struct CL_Discovery_Transport));
wait_for_transport(discovered_transport);
}
}
|
Call into the discovery function and wait for someone to say hi
|
Call into the discovery function and wait for someone to say hi
|
C
|
mit
|
jamessnee/clacks,jamessnee/clacks,jamessnee/clacks
|
c
|
## Code Before:
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
continue;
}
}
## Instruction:
Call into the discovery function and wait for someone to say hi
## Code After:
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
struct CL_Discovery_Transport *discovered_transport = malloc(sizeof(struct CL_Discovery_Transport));
wait_for_transport(discovered_transport);
}
}
|
# ... existing code ...
void * start_discovery(void * args) {
syslog(LOG_INFO, "started discovery thread");
for (;;) {
struct CL_Discovery_Transport *discovered_transport = malloc(sizeof(struct CL_Discovery_Transport));
wait_for_transport(discovered_transport);
}
}
# ... rest of the code ...
|
5cc511e2e7d685fe8c2983c14d42a4fcfa704c6b
|
heufybot/utils/__init__.py
|
heufybot/utils/__init__.py
|
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3)
ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3)
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def parseUserPrefix(prefix):
if "!" in prefix:
nick = prefix[:prefix.find("!")]
ident = prefix[prefix.find("!") + 1:prefix.find("@")]
host = prefix[prefix.find("@") + 1:]
return nick, ident, host
# Not all "users" have idents and hostnames
nick = prefix
return nick, None, None
|
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3)
ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3)
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def parseUserPrefix(prefix):
if "!" in prefix:
nick = prefix[:prefix.find("!")]
ident = prefix[prefix.find("!") + 1:prefix.find("@")]
host = prefix[prefix.find("@") + 1:]
return nick, ident, host
# Not all "users" have idents and hostnames
nick = prefix
return nick, None, None
def networkName(bot, server):
return bot.servers[server].supportHelper.network
|
Add a helper function to grab network names
|
Add a helper function to grab network names
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
python
|
## Code Before:
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3)
ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3)
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def parseUserPrefix(prefix):
if "!" in prefix:
nick = prefix[:prefix.find("!")]
ident = prefix[prefix.find("!") + 1:prefix.find("@")]
host = prefix[prefix.find("@") + 1:]
return nick, ident, host
# Not all "users" have idents and hostnames
nick = prefix
return nick, None, None
## Instruction:
Add a helper function to grab network names
## Code After:
def _enum(**enums):
return type('Enum', (), enums)
ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3)
ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3)
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def parseUserPrefix(prefix):
if "!" in prefix:
nick = prefix[:prefix.find("!")]
ident = prefix[prefix.find("!") + 1:prefix.find("@")]
host = prefix[prefix.find("@") + 1:]
return nick, ident, host
# Not all "users" have idents and hostnames
nick = prefix
return nick, None, None
def networkName(bot, server):
return bot.servers[server].supportHelper.network
|
...
# Not all "users" have idents and hostnames
nick = prefix
return nick, None, None
def networkName(bot, server):
return bot.servers[server].supportHelper.network
...
|
c23787680c40cc7f871f23e920486d07452d2cf3
|
traits/__init__.py
|
traits/__init__.py
|
from __future__ import absolute_import
__version__ = '4.3.0'
|
from __future__ import absolute_import
__version__ = '4.3.0'
# Add a NullHandler so 'traits' loggers don't complain when they get used.
import logging
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
del logging, logger, NullHandler
|
Use a NullHandler for all 'traits' loggers per best practice for logging.
|
FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.
|
Python
|
bsd-3-clause
|
burnpanck/traits,burnpanck/traits
|
python
|
## Code Before:
from __future__ import absolute_import
__version__ = '4.3.0'
## Instruction:
FIX: Use a NullHandler for all 'traits' loggers per best practice for logging.
## Code After:
from __future__ import absolute_import
__version__ = '4.3.0'
# Add a NullHandler so 'traits' loggers don't complain when they get used.
import logging
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
del logging, logger, NullHandler
|
...
from __future__ import absolute_import
__version__ = '4.3.0'
# Add a NullHandler so 'traits' loggers don't complain when they get used.
import logging
class NullHandler(logging.Handler):
def handle(self, record):
pass
def emit(self, record):
pass
def createLock(self):
self.lock = None
logger = logging.getLogger(__name__)
logger.addHandler(NullHandler())
del logging, logger, NullHandler
...
|
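Since Python 2.7 and 3.1 the standard library ships `logging.NullHandler`, so the hand-rolled class above is presumably there to support older interpreters. On any modern interpreter the same library-logging convention collapses to two lines:

```python
import logging

# A no-op handler keeps the package's loggers quiet when the application has not
# configured logging (avoids the "No handlers could be found" warning on Python 2).
logging.getLogger(__name__).addHandler(logging.NullHandler())
```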
ccf60e9e79b8b2db8cbf7918caf23314e8790134
|
lib/reporter.py
|
lib/reporter.py
|
import sys
import os
name = sys.argv[1]
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s>" % name)
print("\t<status=\"%s\" />" % status)
if status != "SKIP":
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
|
import sys
import os
name = sys.argv[1]
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s status=\"%s\">" % (name, status))
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
|
Fix the XML format produced
|
Fix the XML format produced
|
Python
|
apache-2.0
|
CESNET/secant,CESNET/secant
|
python
|
## Code Before:
import sys
import os
name = sys.argv[1]
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s>" % name)
print("\t<status=\"%s\" />" % status)
if status != "SKIP":
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
## Instruction:
Fix the XML format produced
## Code After:
import sys
import os
name = sys.argv[1]
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s status=\"%s\">" % (name, status))
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
|
# ... existing code ...
status = sys.stdin.readline()
status = status.rstrip(os.linesep)
print("<%s status=\"%s\">" % (name, status))
print("\t<outcome>")
for line in sys.stdin:
# Escaping, ... !
print(line.rstrip(os.linesep))
print("\t</outcome>")
print("</%s>" % name)
# ... rest of the code ...
|
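Both versions of the reporter leave the `# Escaping, ... !` comment as a to-do and interpolate raw strings into the XML. If the status or the captured output can contain markup characters, the standard library's `xml.sax.saxutils` helpers cover it; a hedged sketch of the same output with escaping added (not the project's code):

```python
import os
import sys
from xml.sax.saxutils import escape, quoteattr

name = sys.argv[1]
status = sys.stdin.readline().rstrip(os.linesep)

print("<%s status=%s>" % (name, quoteattr(status)))  # quoteattr adds the surrounding quotes
print("\t<outcome>")
for line in sys.stdin:
    print(escape(line.rstrip(os.linesep)))           # escapes &, < and >
print("\t</outcome>")
print("</%s>" % name)
```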
03977d24d5862373a881b7098bc78adc30fe8256
|
make_src_bem.py
|
make_src_bem.py
|
from __future__ import print_function
import mne
from my_settings import *
subject = sys.argv[1]
# make source space
src = mne.setup_source_space(subject, spacing='oct6',
subjects_dir=subjects_dir,
add_dist=False, overwrite=True)
# save source space
mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src)
conductivity = (0.3, 0.006, 0.3) # for three layers
model = mne.make_bem_model(subject=subject, ico=None,
conductivity=conductivity,
subjects_dir=subjects_dir)
bem = mne.make_bem_solution(model)
mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
|
from __future__ import print_function
import mne
import subprocess
from my_settings import *
subject = sys.argv[1]
cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis"
# make source space
src = mne.setup_source_space(subject, spacing='oct6',
subjects_dir=subjects_dir,
add_dist=False, overwrite=True)
# save source space
mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src)
setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % (
subject)
subprocess.call([cmd, "1", setup_forward])
# conductivity = (0.3, 0.006, 0.3) # for three layers
# model = mne.make_bem_model(subject=subject, ico=None,
# conductivity=conductivity,
# subjects_dir=subjects_dir)
# bem = mne.make_bem_solution(model)
# mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
|
Change to make BEM solution from mne-C
|
Change to make BEM solution from mne-C
|
Python
|
bsd-3-clause
|
MadsJensen/RP_scripts,MadsJensen/RP_scripts,MadsJensen/RP_scripts
|
python
|
## Code Before:
from __future__ import print_function
import mne
from my_settings import *
subject = sys.argv[1]
# make source space
src = mne.setup_source_space(subject, spacing='oct6',
subjects_dir=subjects_dir,
add_dist=False, overwrite=True)
# save source space
mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src)
conductivity = (0.3, 0.006, 0.3) # for three layers
model = mne.make_bem_model(subject=subject, ico=None,
conductivity=conductivity,
subjects_dir=subjects_dir)
bem = mne.make_bem_solution(model)
mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
## Instruction:
Change to make BEM solution from mne-C
## Code After:
from __future__ import print_function
import mne
import subprocess
from my_settings import *
subject = sys.argv[1]
cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis"
# make source space
src = mne.setup_source_space(subject, spacing='oct6',
subjects_dir=subjects_dir,
add_dist=False, overwrite=True)
# save source space
mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src)
setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % (
subject)
subprocess.call([cmd, "1", setup_forward])
# conductivity = (0.3, 0.006, 0.3) # for three layers
# model = mne.make_bem_model(subject=subject, ico=None,
# conductivity=conductivity,
# subjects_dir=subjects_dir)
# bem = mne.make_bem_solution(model)
# mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
|
# ... existing code ...
from __future__ import print_function
import mne
import subprocess
from my_settings import *
subject = sys.argv[1]
cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis"
# make source space
src = mne.setup_source_space(subject, spacing='oct6',
# ... modified code ...
subjects_dir=subjects_dir,
add_dist=False, overwrite=True)
# save source space
mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src)
setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % (
subject)
subprocess.call([cmd, "1", setup_forward])
# conductivity = (0.3, 0.006, 0.3) # for three layers
# model = mne.make_bem_model(subject=subject, ico=None,
# conductivity=conductivity,
# subjects_dir=subjects_dir)
# bem = mne.make_bem_solution(model)
# mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
# ... rest of the code ...
|
8885b3672c5bff8ba4e9583f9acafc62d09b7c83
|
src/main/java/com/github/anba/es6draft/runtime/types/IntegrityLevel.java
|
src/main/java/com/github/anba/es6draft/runtime/types/IntegrityLevel.java
|
/**
* Copyright (c) 2012-2013 André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
package com.github.anba.es6draft.runtime.types;
/**
*
*/
public enum IntegrityLevel {
NonExtensible, Sealed, Frozen
}
|
/**
* Copyright (c) 2012-2013 André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
package com.github.anba.es6draft.runtime.types;
/**
*
*/
public enum IntegrityLevel {
Sealed, Frozen
}
|
Remove unused 'NonExtensible' entry from enum
|
Remove unused 'NonExtensible' entry from enum
|
Java
|
mit
|
jugglinmike/es6draft,anba/es6draft,jugglinmike/es6draft,anba/es6draft,anba/es6draft,jugglinmike/es6draft,jugglinmike/es6draft
|
java
|
## Code Before:
/**
* Copyright (c) 2012-2013 André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
package com.github.anba.es6draft.runtime.types;
/**
*
*/
public enum IntegrityLevel {
NonExtensible, Sealed, Frozen
}
## Instruction:
Remove unused 'NonExtensible' entry from enum
## Code After:
/**
* Copyright (c) 2012-2013 André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
package com.github.anba.es6draft.runtime.types;
/**
*
*/
public enum IntegrityLevel {
Sealed, Frozen
}
|
# ... existing code ...
*
*/
public enum IntegrityLevel {
Sealed, Frozen
}
# ... rest of the code ...
|
fa495f9f2f887533f870ddedef3a1aea0a699419
|
oscar/management/commands/oscar_fork_statics.py
|
oscar/management/commands/oscar_fork_statics.py
|
import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (source, destination)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
import logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
Fix string formatting bug in fork_statics man. command
|
Fix string formatting bug in fork_statics man. command
|
Python
|
bsd-3-clause
|
amirrpp/django-oscar,kapt/django-oscar,ademuk/django-oscar,nickpack/django-oscar,eddiep1101/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,elliotthill/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,ademuk/django-oscar,okfish/django-oscar,manevant/django-oscar,mexeniz/django-oscar,ademuk/django-oscar,jinnykoo/christmas,QLGu/django-oscar,bnprk/django-oscar,bschuon/django-oscar,ka7eh/django-oscar,WadeYuChen/django-oscar,john-parton/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,kapari/django-oscar,lijoantony/django-oscar,thechampanurag/django-oscar,sasha0/django-oscar,anentropic/django-oscar,anentropic/django-oscar,taedori81/django-oscar,kapt/django-oscar,eddiep1101/django-oscar,john-parton/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,pdonadeo/django-oscar,elliotthill/django-oscar,django-oscar/django-oscar,jinnykoo/wuyisj.com,pasqualguerrero/django-oscar,solarissmoke/django-oscar,mexeniz/django-oscar,elliotthill/django-oscar,ka7eh/django-oscar,manevant/django-oscar,solarissmoke/django-oscar,rocopartners/django-oscar,kapari/django-oscar,jinnykoo/wuyisj.com,nfletton/django-oscar,makielab/django-oscar,bschuon/django-oscar,dongguangming/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,dongguangming/django-oscar,saadatqadri/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,dongguangming/django-oscar,nickpack/django-oscar,jmt4/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,binarydud/django-oscar,lijoantony/django-oscar,WadeYuChen/django-oscar,itbabu/django-oscar,QLGu/django-oscar,michaelkuty/django-oscar,jlmadurga/django-oscar,bschuon/django-oscar,okfish/django-oscar,bnprk/django-oscar,vovanbo/django-oscar,faratro/django-oscar,sasha0/django-oscar,faratro/django-oscar,solarissmoke/django-oscar,monikasulik/django-oscar,Jannes123/django-oscar,nfletton/django-oscar,manevant/django-oscar,josesanch/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,adamend/django-oscar,jinnykoo/wuyisj,lijoantony/django-oscar,jinnykoo/wuyisj.com,jlmadurga/django-oscar,ademuk/django-oscar,jinnykoo/christmas,nickpack/django-oscar,makielab/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,Jannes123/django-oscar,pdonadeo/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,josesanch/django-oscar,Idematica/django-oscar,thechampanurag/django-oscar,adamend/django-oscar,jinnykoo/wuyisj,bnprk/django-oscar,ahmetdaglarbas/e-commerce,pdonadeo/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,michaelkuty/django-oscar,marcoantoniooliveira/labweb,DrOctogon/unwash_ecom,monikasulik/django-oscar,django-oscar/django-oscar,amirrpp/django-oscar,WillisXChen/django-oscar,ka7eh/django-oscar,sonofatailor/django-oscar,manevant/django-oscar,WillisXChen/django-oscar,MatthewWilkes/django-oscar,okfish/django-oscar,jinnykoo/wuyisj,lijoantony/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,spartonia/django-oscar,nfletton/django-oscar,taedori81/django-oscar,Bogh/django-oscar,john-parton/django-oscar,binarydud/django-oscar,spartonia/django-oscar,machtfit/django-oscar,Idematica/django-oscar,makielab/django-oscar,jmt4/django-oscar,anentropic/django-oscar,sonofatailor/django-oscar,jlmadurga/django-oscar,spartonia/django-oscar,itbabu/django-oscar,spartonia/django-oscar,QLGu/django-oscar,binarydud/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,rocopartners/django-oscar,DrOctogon/unwa
sh_ecom,jinnykoo/christmas,ka7eh/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,michaelkuty/django-oscar,pasqualguerrero/django-oscar,Bogh/django-oscar,taedori81/django-oscar,Idematica/django-oscar,jinnykoo/wuyisj.com,MatthewWilkes/django-oscar,jinnykoo/wuyisj,adamend/django-oscar,Bogh/django-oscar,solarissmoke/django-oscar,kapt/django-oscar,WillisXChen/django-oscar,saadatqadri/django-oscar,michaelkuty/django-oscar,Bogh/django-oscar,pdonadeo/django-oscar,faratro/django-oscar,mexeniz/django-oscar,kapari/django-oscar,thechampanurag/django-oscar,monikasulik/django-oscar,itbabu/django-oscar,bnprk/django-oscar,nickpack/django-oscar,sasha0/django-oscar,saadatqadri/django-oscar,josesanch/django-oscar,WadeYuChen/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,monikasulik/django-oscar,jmt4/django-oscar,adamend/django-oscar,john-parton/django-oscar,thechampanurag/django-oscar,machtfit/django-oscar,marcoantoniooliveira/labweb,jlmadurga/django-oscar,QLGu/django-oscar,django-oscar/django-oscar,rocopartners/django-oscar,makielab/django-oscar,faratro/django-oscar,okfish/django-oscar,anentropic/django-oscar,WadeYuChen/django-oscar,marcoantoniooliveira/labweb,amirrpp/django-oscar,ahmetdaglarbas/e-commerce
|
python
|
## Code Before:
import logging
import os
import shutil
from django.db.models import get_model
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
ProductAlert = get_model('customer', 'ProductAlert')
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (source, destination)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
## Instruction:
Fix string formatting bug in fork_statics man. command
## Code After:
import logging
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Copy Oscar's statics into local project so they can be used as a base for
styling a new site.
"""
args = '<destination folder>'
help = "Copy Oscar's static files"
def handle(self, *args, **options):
# Determine where to copy to
folder = args[0] if args else 'static'
if not folder.startswith('/'):
destination = os.path.join(os.getcwd(), folder)
else:
destination = folder
if os.path.exists(destination):
raise CommandError(
"The folder %s already exists - aborting!" % destination)
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
if destination not in settings.STATICFILES_DIRS:
print ("You need to add %s to STATICFILES_DIRS in order for your "
"local overrides to be picked up") % destination
|
...
import os
import shutil
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
logger = logging.getLogger(__name__)
...
source = os.path.realpath(
os.path.join(os.path.dirname(__file__), '../../static'))
print "Copying Oscar's static files to %s" % (destination,)
shutil.copytree(source, destination)
# Check if this new folder is in STATICFILES_DIRS
...
|
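The bug fixed in the record above is a %-formatting arity mismatch: one %s placeholder was fed a two-element tuple, which raises TypeError at runtime. A minimal reproduction, with made-up paths (none of these values come from the record):

source = '/tmp/src'
destination = '/tmp/dst'

# One placeholder, two arguments: TypeError ("not all arguments converted ...").
try:
    print("Copying Oscar's static files to %s" % (source, destination))
except TypeError as exc:
    print('broken format:', exc)

# The corrected call supplies exactly one argument for the single placeholder.
print("Copying Oscar's static files to %s" % (destination,))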
c0169c5073e4a83120f4d6860258c3085b4c1cf5
|
setup.py
|
setup.py
|
import subprocess as sp
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
sp.check_call(['flit', 'install', '--deps', 'production'])
|
import subprocess as sp
import sys
import os
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
|
Use flit that's been installed in the virtualenv
|
Use flit that's been installed in the virtualenv
|
Python
|
bsd-3-clause
|
jupyter/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,dementrock/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader
|
python
|
## Code Before:
import subprocess as sp
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
sp.check_call(['flit', 'install', '--deps', 'production'])
## Instruction:
Use flit that's been installed in the virtualenv
## Code After:
import subprocess as sp
import sys
import os
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
|
# ... existing code ...
import subprocess as sp
import sys
import os
print('Warning: this setup.py uses flit, not setuptools.')
print('Behavior may not be exactly what you expect. Use at your own risk!')
flit = os.path.join(os.path.dirname(sys.executable), 'flit')
cmd = [flit, 'install', '--deps', 'production']
print(" ".join(cmd))
sp.check_call(cmd)
# ... rest of the code ...
|
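The setup.py change above resolves flit relative to the running interpreter, so the copy installed in the active virtualenv is used instead of whatever happens to be first on PATH. A small sketch of the same lookup (the tool name is just an example):

import os
import sys

# sys.executable is the interpreter inside the active environment, so a sibling
# in the same bin/ (or Scripts\ on Windows) belongs to that environment too.
tool = os.path.join(os.path.dirname(sys.executable), 'flit')
print(tool)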
19ce5e58c639c6502126a94c61ec104abee1449d
|
src/uk/org/ponder/rsf/uitype/BooleanUIType.java
|
src/uk/org/ponder/rsf/uitype/BooleanUIType.java
|
/*
* Created on Nov 11, 2005
*/
package uk.org.ponder.rsf.uitype;
public class BooleanUIType implements UIType {
public static final BooleanUIType instance = new BooleanUIType();
public Boolean PLACEHOLDER = new Boolean(false);
public Object getPlaceholder() {
return PLACEHOLDER;
}
public String getName() {
return "boolean";
}
public boolean valueUnchanged(Object oldvalue, Object newvalue) {
return oldvalue.equals(newvalue);
}
}
|
/*
* Created on Nov 11, 2005
*/
package uk.org.ponder.rsf.uitype;
public class BooleanUIType implements UIType {
public static final BooleanUIType instance = new BooleanUIType();
public Boolean PLACEHOLDER = Boolean.FALSE;
public Object getPlaceholder() {
return PLACEHOLDER;
}
public String getName() {
return "boolean";
}
public boolean valueUnchanged(Object oldvalue, Object newvalue) {
return oldvalue.equals(newvalue);
}
}
|
Use constant to generate Boolean
|
Use constant to generate Boolean
|
Java
|
bsd-3-clause
|
rsf/RSFUtil,rsf/RSFUtil,axxter99/RSFUtil,axxter99/RSFUtil
|
java
|
## Code Before:
/*
* Created on Nov 11, 2005
*/
package uk.org.ponder.rsf.uitype;
public class BooleanUIType implements UIType {
public static final BooleanUIType instance = new BooleanUIType();
public Boolean PLACEHOLDER = new Boolean(false);
public Object getPlaceholder() {
return PLACEHOLDER;
}
public String getName() {
return "boolean";
}
public boolean valueUnchanged(Object oldvalue, Object newvalue) {
return oldvalue.equals(newvalue);
}
}
## Instruction:
Use constant to generate Boolean
## Code After:
/*
* Created on Nov 11, 2005
*/
package uk.org.ponder.rsf.uitype;
public class BooleanUIType implements UIType {
public static final BooleanUIType instance = new BooleanUIType();
public Boolean PLACEHOLDER = Boolean.FALSE;
public Object getPlaceholder() {
return PLACEHOLDER;
}
public String getName() {
return "boolean";
}
public boolean valueUnchanged(Object oldvalue, Object newvalue) {
return oldvalue.equals(newvalue);
}
}
|
// ... existing code ...
public class BooleanUIType implements UIType {
public static final BooleanUIType instance = new BooleanUIType();
public Boolean PLACEHOLDER = Boolean.FALSE;
public Object getPlaceholder() {
return PLACEHOLDER;
}
// ... rest of the code ...
|
e422f77898853fc759d3828c4053b799cd2b1fa3
|
plumeria/plugins/bot_control.py
|
plumeria/plugins/bot_control.py
|
from plumeria.command import commands, CommandError
from plumeria.message.lists import build_list
from plumeria.perms import owners_only
from plumeria.transport import transports
@commands.register('accept invite', category='Discord')
@owners_only
async def accept_invite(message):
"""
Accept an invite to join a server.
Example::
/accept invite https://discord.gg/00000
"""
url = message.content.strip()
results = []
if not len(url):
raise CommandError("Supply an invite URL.")
for transport in transports.transports.values():
if hasattr(transport, 'accept_invite'):
try:
await transport.accept_invite(url)
results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}'))
except Exception as e:
results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e))))
else:
results.append((transport.id, "\N{WARNING SIGN} No support for invite links"))
if len(results):
return build_list(["**{}:** {}".format(e[0], e[1]) for e in results])
else:
raise CommandError("No transports available.")
|
from plumeria.command import commands, CommandError
from plumeria.message.lists import build_list
from plumeria.perms import owners_only
from plumeria.transport import transports
@commands.register('join', category='Discord')
@owners_only
async def join(message):
"""
Accept an invite to join a server.
Example::
/join https://discord.gg/00000
"""
url = message.content.strip()
results = []
if not len(url):
raise CommandError("Supply an invite URL.")
for transport in transports.transports.values():
if hasattr(transport, 'accept_invite'):
try:
await transport.accept_invite(url)
results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}'))
except Exception as e:
results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e))))
else:
results.append((transport.id, "\N{WARNING SIGN} No support for invite links"))
if len(results):
return build_list(["**{}:** {}".format(e[0], e[1]) for e in results])
else:
raise CommandError("No transports available.")
|
Use /join instead of /accept invite.
|
Use /join instead of /accept invite.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
python
|
## Code Before:
from plumeria.command import commands, CommandError
from plumeria.message.lists import build_list
from plumeria.perms import owners_only
from plumeria.transport import transports
@commands.register('accept invite', category='Discord')
@owners_only
async def accept_invite(message):
"""
Accept an invite to join a server.
Example::
/accept invite https://discord.gg/00000
"""
url = message.content.strip()
results = []
if not len(url):
raise CommandError("Supply an invite URL.")
for transport in transports.transports.values():
if hasattr(transport, 'accept_invite'):
try:
await transport.accept_invite(url)
results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}'))
except Exception as e:
results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e))))
else:
results.append((transport.id, "\N{WARNING SIGN} No support for invite links"))
if len(results):
return build_list(["**{}:** {}".format(e[0], e[1]) for e in results])
else:
raise CommandError("No transports available.")
## Instruction:
Use /join instead of /accept invite.
## Code After:
from plumeria.command import commands, CommandError
from plumeria.message.lists import build_list
from plumeria.perms import owners_only
from plumeria.transport import transports
@commands.register('join', category='Discord')
@owners_only
async def join(message):
"""
Accept an invite to join a server.
Example::
/join https://discord.gg/00000
"""
url = message.content.strip()
results = []
if not len(url):
raise CommandError("Supply an invite URL.")
for transport in transports.transports.values():
if hasattr(transport, 'accept_invite'):
try:
await transport.accept_invite(url)
results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}'))
except Exception as e:
results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e))))
else:
results.append((transport.id, "\N{WARNING SIGN} No support for invite links"))
if len(results):
return build_list(["**{}:** {}".format(e[0], e[1]) for e in results])
else:
raise CommandError("No transports available.")
|
# ... existing code ...
from plumeria.transport import transports
@commands.register('join', category='Discord')
@owners_only
async def join(message):
"""
Accept an invite to join a server.
Example::
/join https://discord.gg/00000
"""
url = message.content.strip()
results = []
# ... rest of the code ...
|
cefbcda91d6f9d5a0fce97c7b72844f8dcb8d8cf
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
from .fixtures import *
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true", help="run slow tests")
def pytest_runtest_setup(item):
if "slow" in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
import pytest
import os.path
from functools import lru_cache
from django.conf import settings
from .fixtures import *
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true", help="run slow tests")
def pytest_runtest_setup(item):
if "slow" in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
@lru_cache(maxsize=4)
def _get_sql():
path = os.path.join(settings.BASE_DIR, "sql", "tags.sql")
with open(path, "r") as f:
return f.read()
def on_db_connect(sender, connection, **kwargs):
cursor = connection.cursor()
cursor.execute(_get_sql())
from django.db.backends import signals
signals.connection_created.connect(on_db_connect)
|
Load tags SQL when a connection is created in tests.
|
Load tags SQL when a connection is created in tests.
|
Python
|
agpl-3.0
|
seanchen/taiga-back,obimod/taiga-back,frt-arch/taiga-back,CoolCloud/taiga-back,EvgeneOskin/taiga-back,rajiteh/taiga-back,bdang2012/taiga-back-casting,dayatz/taiga-back,astronaut1712/taiga-back,gauravjns/taiga-back,dayatz/taiga-back,forging2012/taiga-back,bdang2012/taiga-back-casting,dycodedev/taiga-back,taigaio/taiga-back,crr0004/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,CoolCloud/taiga-back,obimod/taiga-back,astagi/taiga-back,WALR/taiga-back,astagi/taiga-back,rajiteh/taiga-back,Tigerwhit4/taiga-back,joshisa/taiga-back,CMLL/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,Zaneh-/bearded-tribble-back,gam-phon/taiga-back,bdang2012/taiga-back-casting,19kestier/taiga-back,dycodedev/taiga-back,Zaneh-/bearded-tribble-back,rajiteh/taiga-back,EvgeneOskin/taiga-back,frt-arch/taiga-back,gam-phon/taiga-back,astagi/taiga-back,coopsource/taiga-back,seanchen/taiga-back,xdevelsistemas/taiga-back-community,gauravjns/taiga-back,astronaut1712/taiga-back,WALR/taiga-back,forging2012/taiga-back,19kestier/taiga-back,obimod/taiga-back,CMLL/taiga-back,astronaut1712/taiga-back,obimod/taiga-back,gam-phon/taiga-back,dycodedev/taiga-back,Rademade/taiga-back,gauravjns/taiga-back,coopsource/taiga-back,CMLL/taiga-back,forging2012/taiga-back,astronaut1712/taiga-back,19kestier/taiga-back,dayatz/taiga-back,CoolCloud/taiga-back,jeffdwyatt/taiga-back,seanchen/taiga-back,Rademade/taiga-back,crr0004/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,Tigerwhit4/taiga-back,crr0004/taiga-back,CoolCloud/taiga-back,EvgeneOskin/taiga-back,WALR/taiga-back,frt-arch/taiga-back,astagi/taiga-back,EvgeneOskin/taiga-back,Rademade/taiga-back,crr0004/taiga-back,joshisa/taiga-back,rajiteh/taiga-back,Rademade/taiga-back,joshisa/taiga-back,jeffdwyatt/taiga-back,seanchen/taiga-back,forging2012/taiga-back,Tigerwhit4/taiga-back,dycodedev/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,bdang2012/taiga-back-casting,gam-phon/taiga-back,CMLL/taiga-back,taigaio/taiga-back,Zaneh-/bearded-tribble-back,Rademade/taiga-back
|
python
|
## Code Before:
import pytest
from .fixtures import *
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true", help="run slow tests")
def pytest_runtest_setup(item):
if "slow" in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
## Instruction:
Load tags SQL when a connection is created in tests.
## Code After:
import pytest
import os.path
from functools import lru_cache
from django.conf import settings
from .fixtures import *
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true", help="run slow tests")
def pytest_runtest_setup(item):
if "slow" in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
@lru_cache(maxsize=4)
def _get_sql():
path = os.path.join(settings.BASE_DIR, "sql", "tags.sql")
with open(path, "r") as f:
return f.read()
def on_db_connect(sender, connection, **kwargs):
cursor = connection.cursor()
cursor.execute(_get_sql())
from django.db.backends import signals
signals.connection_created.connect(on_db_connect)
|
# ... existing code ...
import pytest
import os.path
from functools import lru_cache
from django.conf import settings
from .fixtures import *
# ... modified code ...
def pytest_runtest_setup(item):
if "slow" in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
@lru_cache(maxsize=4)
def _get_sql():
path = os.path.join(settings.BASE_DIR, "sql", "tags.sql")
with open(path, "r") as f:
return f.read()
def on_db_connect(sender, connection, **kwargs):
cursor = connection.cursor()
cursor.execute(_get_sql())
from django.db.backends import signals
signals.connection_created.connect(on_db_connect)
# ... rest of the code ...
|
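The conftest change above hooks Django's connection_created signal, which fires for every new database connection; the same hook is commonly used to set per-connection options. A minimal sketch of that pattern, with an illustrative receiver name and PRAGMA that are not taken from the record:

from django.db.backends.signals import connection_created

def configure_connection(sender, connection, **kwargs):
    # Runs once per new connection; connection.vendor identifies the backend.
    if connection.vendor == 'sqlite':
        with connection.cursor() as cursor:
            cursor.execute('PRAGMA foreign_keys = ON;')

connection_created.connect(configure_connection)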
5ec1ba120642686b87cec0ad2ccc2c1428c4a553
|
samples/config.default.py
|
samples/config.default.py
|
credentials = {
'verify-email.org': {
'username': 'YOURUSERNAME',
'password': 'YOURPASSWORD',
}
'emailhippo.com': {
'api_url': 'https://domain.com/api/v2',
'api_key': 'YOURAPIKEY',
}
}
|
credentials = {
'verify-email.org': {
'username': 'YOURUSERNAME',
'password': 'YOURPASSWORD',
}
'emailhippo.com': {
'api_url': 'https://domain.com/api/v2',
'api_key': 'YOURAPIKEY',
},
'email-validator.net': {
'api_key': '',
},
}
|
Add another provider to config
|
Add another provider to config
|
Python
|
bsd-3-clause
|
scls19fr/email-verif
|
python
|
## Code Before:
credentials = {
'verify-email.org': {
'username': 'YOURUSERNAME',
'password': 'YOURPASSWORD',
}
'emailhippo.com': {
'api_url': 'https://domain.com/api/v2',
'api_key': 'YOURAPIKEY',
}
}
## Instruction:
Add another provider to config
## Code After:
credentials = {
'verify-email.org': {
'username': 'YOURUSERNAME',
'password': 'YOURPASSWORD',
}
'emailhippo.com': {
'api_url': 'https://domain.com/api/v2',
'api_key': 'YOURAPIKEY',
},
'email-validator.net': {
'api_key': '',
},
}
|
// ... existing code ...
'emailhippo.com': {
'api_url': 'https://domain.com/api/v2',
'api_key': 'YOURAPIKEY',
},
'email-validator.net': {
'api_key': '',
},
}
// ... rest of the code ...
|
7818f9aa2d66ab0f4a99f731ecfb03e711e9ad6c
|
utils/send_messages.py
|
utils/send_messages.py
|
from django.conf import settings
import requests
def send_message_android(destination, title, message):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
|
from django.conf import settings
from constance import config
import requests
def send_message_android(destination, message, title=config.TITLE_PUSH_NOTIFICATION):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
def send_message_ios(destination, message, title=config.TITLE_PUSH_NOTIFICATION):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
def send_push_notification(user, message):
try:
devices = user.employeedevice_set.all()
if devices[0].android_device:
send_message_android(devices[0].android_device, message)
if devices[0].ios_device:
send_message_ios(devices[0].ios_device, message)
return True
except:
return False
|
Add send_push_notification function and separate android and ios functions
|
Add send_push_notification function and separate android and ios functions
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
python
|
## Code Before:
from django.conf import settings
import requests
def send_message_android(destination, title, message):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
## Instruction:
Add send_push_notification function and separate android and ios functions
## Code After:
from django.conf import settings
from constance import config
import requests
def send_message_android(destination, message, title=config.TITLE_PUSH_NOTIFICATION):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
def send_message_ios(destination, message, title=config.TITLE_PUSH_NOTIFICATION):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
def send_push_notification(user, message):
try:
devices = user.employeedevice_set.all()
if devices[0].android_device:
send_message_android(devices[0].android_device, message)
if devices[0].ios_device:
send_message_ios(devices[0].ios_device, message)
return True
except:
return False
|
...
from django.conf import settings
from constance import config
import requests
def send_message_android(destination, message, title=config.TITLE_PUSH_NOTIFICATION):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
...
headers=headers
)
print request.text
def send_message_ios(destination, message, title=config.TITLE_PUSH_NOTIFICATION):
headers = {
'Authorization': 'key=' + settings.FIREBASE_SERVER_KEY,
'Content - Type': 'application/json'
}
payload = {
"to": destination,
"notification": {"title": title, "text": message}
}
request = requests.post(
settings.FIREBASE_API_URL,
json=payload,
headers=headers
)
print request.text
def send_push_notification(user, message):
try:
devices = user.employeedevice_set.all()
if devices[0].android_device:
send_message_android(devices[0].android_device, message)
if devices[0].ios_device:
send_message_ios(devices[0].ios_device, message)
return True
except:
return False
...
|
62d7c94968d70564839b32375fac6608720c2a67
|
backend/pycon/urls.py
|
backend/pycon/urls.py
|
from api.views import GraphQLView
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
]
|
from api.views import GraphQLView
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Add media url when running in debug mode
|
Add media url when running in debug mode
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
python
|
## Code Before:
from api.views import GraphQLView
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
]
## Instruction:
Add media url when running in debug mode
## Code After:
from api.views import GraphQLView
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
// ... existing code ...
from api.views import GraphQLView
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
// ... modified code ...
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
// ... rest of the code ...
|
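The commit message above says the media URL is added in debug mode even though the record appends it unconditionally; Django's static() helper already returns an empty pattern list when settings.DEBUG is False, so the result is debug-only either way. An equivalent, more explicit sketch (the settings names are the standard Django ones, the patterns are placeholders):

from django.conf import settings
from django.conf.urls.static import static

urlpatterns = [
    # ... project URL patterns ...
]

if settings.DEBUG:
    # Serve user-uploaded media from MEDIA_ROOT only during local development.
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)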
21dea82cd8fc2b7d8889013dfd827f59cc8ceb58
|
acmd/tools/assets.py
|
acmd/tools/assets.py
|
import sys
import os.path
import optparse
import json
import requests
from acmd import tool, log
from acmd import OK, SERVER_ERROR, USER_ERROR
from acmd.props import parse_properties
parser = optparse.OptionParser("acmd assets <import|touch> [options] <file>")
parser.add_option("-r", "--raw",
action="store_const", const=True, dest="raw",
help="output raw response data")
@tool('assets')
class AssetsTool(object):
""" Manage AEM DAM assets """
@staticmethod
def execute(server, argv):
options, args = parser.parse_args(argv)
return OK
def import_file(server, options, filename):
pass
|
import optparse
import os
import requests
from acmd import OK, SERVER_ERROR
from acmd import tool, error, log
from acmd.tools.tool_utils import get_argument, get_command
parser = optparse.OptionParser("acmd assets <import|touch> [options] <file>")
parser.add_option("-r", "--raw",
action="store_const", const=True, dest="raw",
help="output raw response data")
@tool('assets')
class AssetsTool(object):
""" Manage AEM DAM assets """
@staticmethod
def execute(server, argv):
options, args = parser.parse_args(argv)
action = get_command(args)
actionarg = get_argument(args)
if action == 'import':
return import_path(server, options, actionarg)
return OK
def import_path(server, options, path):
if os.path.isdir(path):
return import_directory(server, options, path)
else:
return import_file(server, options, path)
def import_directory(server, options, path):
log("Importing file {}".format(path))
for subdir, dirs, files in os.walk(path):
# _create_dir(server, subdir)
for filename in files:
import_file(server, options, os.path.join(subdir, filename))
def import_file(server, options, filename):
print filename
# curl -s -u admin:admin -X POST -F "jcr:primaryType=sling:OrderedFolder" $HOST$dampath > /dev/null
def _create_dir(server, path):
form_data = {'jcr:primaryType': 'sling:OrderedFolder'}
url = server.url(path)
resp = requests.post(url, auth=server.auth, data=form_data)
if resp.status_code != 201:
error("Failed to create directory {}".format(url))
return SERVER_ERROR
return OK
|
Support iterating over files in a dir
|
Support iterating over files in a dir
|
Python
|
mit
|
darashenka/aem-cmd,darashenka/aem-cmd,darashenka/aem-cmd
|
python
|
## Code Before:
import sys
import os.path
import optparse
import json
import requests
from acmd import tool, log
from acmd import OK, SERVER_ERROR, USER_ERROR
from acmd.props import parse_properties
parser = optparse.OptionParser("acmd assets <import|touch> [options] <file>")
parser.add_option("-r", "--raw",
action="store_const", const=True, dest="raw",
help="output raw response data")
@tool('assets')
class AssetsTool(object):
""" Manage AEM DAM assets """
@staticmethod
def execute(server, argv):
options, args = parser.parse_args(argv)
return OK
def import_file(server, options, filename):
pass
## Instruction:
Support iterating over files in a dir
## Code After:
import optparse
import os
import requests
from acmd import OK, SERVER_ERROR
from acmd import tool, error, log
from acmd.tools.tool_utils import get_argument, get_command
parser = optparse.OptionParser("acmd assets <import|touch> [options] <file>")
parser.add_option("-r", "--raw",
action="store_const", const=True, dest="raw",
help="output raw response data")
@tool('assets')
class AssetsTool(object):
""" Manage AEM DAM assets """
@staticmethod
def execute(server, argv):
options, args = parser.parse_args(argv)
action = get_command(args)
actionarg = get_argument(args)
if action == 'import':
return import_path(server, options, actionarg)
return OK
def import_path(server, options, path):
if os.path.isdir(path):
return import_directory(server, options, path)
else:
return import_file(server, options, path)
def import_directory(server, options, path):
log("Importing file {}".format(path))
for subdir, dirs, files in os.walk(path):
# _create_dir(server, subdir)
for filename in files:
import_file(server, options, os.path.join(subdir, filename))
def import_file(server, options, filename):
print filename
# curl -s -u admin:admin -X POST -F "jcr:primaryType=sling:OrderedFolder" $HOST$dampath > /dev/null
def _create_dir(server, path):
form_data = {'jcr:primaryType': 'sling:OrderedFolder'}
url = server.url(path)
resp = requests.post(url, auth=server.auth, data=form_data)
if resp.status_code != 201:
error("Failed to create directory {}".format(url))
return SERVER_ERROR
return OK
|
// ... existing code ...
import optparse
import os
import requests
from acmd import OK, SERVER_ERROR
from acmd import tool, error, log
from acmd.tools.tool_utils import get_argument, get_command
parser = optparse.OptionParser("acmd assets <import|touch> [options] <file>")
parser.add_option("-r", "--raw",
// ... modified code ...
@staticmethod
def execute(server, argv):
options, args = parser.parse_args(argv)
action = get_command(args)
actionarg = get_argument(args)
if action == 'import':
return import_path(server, options, actionarg)
return OK
def import_path(server, options, path):
if os.path.isdir(path):
return import_directory(server, options, path)
else:
return import_file(server, options, path)
def import_directory(server, options, path):
log("Importing file {}".format(path))
for subdir, dirs, files in os.walk(path):
# _create_dir(server, subdir)
for filename in files:
import_file(server, options, os.path.join(subdir, filename))
def import_file(server, options, filename):
print filename
# curl -s -u admin:admin -X POST -F "jcr:primaryType=sling:OrderedFolder" $HOST$dampath > /dev/null
def _create_dir(server, path):
form_data = {'jcr:primaryType': 'sling:OrderedFolder'}
url = server.url(path)
resp = requests.post(url, auth=server.auth, data=form_data)
if resp.status_code != 201:
error("Failed to create directory {}".format(url))
return SERVER_ERROR
return OK
// ... rest of the code ...
|
3a8985756e68560b5aa84adab988b681e1695f84
|
generate/templates/manual/include/configurable_class_wrapper.h
|
generate/templates/manual/include/configurable_class_wrapper.h
|
namespace nodegit {
class Context;
template<typename Traits>
class ConfigurableClassWrapper : public CleanupHandle {
public:
typedef typename Traits::cType cType;
typedef typename Traits::configurableCppClass configurableCppClass;
struct v8ConversionResult {
v8ConversionResult(std::string _error)
: error(std::move(_error)), result(nullptr)
{}
v8ConversionResult(std::shared_ptr<configurableCppClass> _result)
: result(std::move(_result))
{}
std::string error;
std::shared_ptr<configurableCppClass> result;
};
// We copy the entity
ConfigurableClassWrapper(nodegit::Context *_nodeGitContext)
: nodegitContext(_nodeGitContext), raw(nullptr) {}
virtual ~ConfigurableClassWrapper() {
if (raw != nullptr) {
delete raw;
raw = nullptr;
}
}
const Context *nodegitContext = nullptr;
cType *GetValue() {
return raw;
}
protected:
cType *raw;
std::vector<std::shared_ptr<CleanupHandle>> childCleanupVector;
};
}
#endif
|
namespace nodegit {
class Context;
template<typename Traits>
class ConfigurableClassWrapper : public CleanupHandle {
public:
typedef typename Traits::cType cType;
typedef typename Traits::configurableCppClass configurableCppClass;
struct v8ConversionResult {
v8ConversionResult(std::string _error)
: error(std::move(_error)), result(nullptr)
{}
v8ConversionResult(std::shared_ptr<configurableCppClass> _result)
: result(std::move(_result))
{}
std::string error;
std::shared_ptr<configurableCppClass> result;
};
// We copy the entity
ConfigurableClassWrapper(nodegit::Context *_nodeGitContext)
: nodegitContext(_nodeGitContext), raw(nullptr) {}
ConfigurableClassWrapper(const ConfigurableClassWrapper &) = delete;
ConfigurableClassWrapper(ConfigurableClassWrapper &&) = delete;
ConfigurableClassWrapper &operator=(const ConfigurableClassWrapper &) = delete;
ConfigurableClassWrapper &operator=(ConfigurableClassWrapper &&) = delete;
virtual ~ConfigurableClassWrapper() {
if (raw != nullptr) {
delete raw;
raw = nullptr;
}
}
const Context *nodegitContext = nullptr;
cType *GetValue() {
return raw;
}
protected:
cType *raw;
std::vector<std::shared_ptr<CleanupHandle>> childCleanupVector;
};
}
#endif
|
Delete copy and move constructors for ConfigurableClassWrapper
|
Delete copy and move constructors for ConfigurableClassWrapper
|
C
|
mit
|
jmurzy/nodegit,jmurzy/nodegit,nodegit/nodegit,nodegit/nodegit,jmurzy/nodegit,jmurzy/nodegit,nodegit/nodegit,nodegit/nodegit,nodegit/nodegit,jmurzy/nodegit
|
c
|
## Code Before:
namespace nodegit {
class Context;
template<typename Traits>
class ConfigurableClassWrapper : public CleanupHandle {
public:
typedef typename Traits::cType cType;
typedef typename Traits::configurableCppClass configurableCppClass;
struct v8ConversionResult {
v8ConversionResult(std::string _error)
: error(std::move(_error)), result(nullptr)
{}
v8ConversionResult(std::shared_ptr<configurableCppClass> _result)
: result(std::move(_result))
{}
std::string error;
std::shared_ptr<configurableCppClass> result;
};
// We copy the entity
ConfigurableClassWrapper(nodegit::Context *_nodeGitContext)
: nodegitContext(_nodeGitContext), raw(nullptr) {}
virtual ~ConfigurableClassWrapper() {
if (raw != nullptr) {
delete raw;
raw = nullptr;
}
}
const Context *nodegitContext = nullptr;
cType *GetValue() {
return raw;
}
protected:
cType *raw;
std::vector<std::shared_ptr<CleanupHandle>> childCleanupVector;
};
}
#endif
## Instruction:
Delete copy and move constructors for ConfigurableClassWrapper
## Code After:
namespace nodegit {
class Context;
template<typename Traits>
class ConfigurableClassWrapper : public CleanupHandle {
public:
typedef typename Traits::cType cType;
typedef typename Traits::configurableCppClass configurableCppClass;
struct v8ConversionResult {
v8ConversionResult(std::string _error)
: error(std::move(_error)), result(nullptr)
{}
v8ConversionResult(std::shared_ptr<configurableCppClass> _result)
: result(std::move(_result))
{}
std::string error;
std::shared_ptr<configurableCppClass> result;
};
// We copy the entity
ConfigurableClassWrapper(nodegit::Context *_nodeGitContext)
: nodegitContext(_nodeGitContext), raw(nullptr) {}
ConfigurableClassWrapper(const ConfigurableClassWrapper &) = delete;
ConfigurableClassWrapper(ConfigurableClassWrapper &&) = delete;
ConfigurableClassWrapper &operator=(const ConfigurableClassWrapper &) = delete;
ConfigurableClassWrapper &operator=(ConfigurableClassWrapper &&) = delete;
virtual ~ConfigurableClassWrapper() {
if (raw != nullptr) {
delete raw;
raw = nullptr;
}
}
const Context *nodegitContext = nullptr;
cType *GetValue() {
return raw;
}
protected:
cType *raw;
std::vector<std::shared_ptr<CleanupHandle>> childCleanupVector;
};
}
#endif
|
# ... existing code ...
ConfigurableClassWrapper(nodegit::Context *_nodeGitContext)
: nodegitContext(_nodeGitContext), raw(nullptr) {}
ConfigurableClassWrapper(const ConfigurableClassWrapper &) = delete;
ConfigurableClassWrapper(ConfigurableClassWrapper &&) = delete;
ConfigurableClassWrapper &operator=(const ConfigurableClassWrapper &) = delete;
ConfigurableClassWrapper &operator=(ConfigurableClassWrapper &&) = delete;
virtual ~ConfigurableClassWrapper() {
if (raw != nullptr) {
delete raw;
# ... rest of the code ...
|
fe4fec66cbf4100752c4b7414090019ab8ddb8ce
|
ideascube/conf/idb_bdi.py
|
ideascube/conf/idb_bdi.py
|
"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
|
"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
|
Add cards for Ideasbox in Burundi
|
Add cards for Ideasbox in Burundi
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
python
|
## Code Before:
"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
## Instruction:
Add cards for Ideasbox in Burundi
## Code After:
"""Generic config for Ideasbox of Burundi"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['refugee_id', 'short_name', 'full_name', 'birth_year', 'gender', 'phone']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Origin'), ['country', 'city', 'country_of_origin_occupation', 'school_level', 'is_sent_to_school']), # noqa
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
|
...
(_('Language skills'), ['rn_level', 'sw_level', 'fr_level']),
(_('National residents'), ['id_card_number']),
)
STAFF_HOME_CARDS = [c for c in STAFF_HOME_CARDS
if c['url'] in ['user_list', 'server:power',
'server:backup']]
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'wikipedia',
},
{
'id': 'khanacademy',
},
{
'id': 'vikidia',
},
{
'id': 'gutenberg',
},
{
'id': 'cpassorcier',
},
{
'id': 'ted',
},
]
...
|
ee40c7b87488ad9fe3131863bc586cbe5547c7ae
|
app/src/main/java/net/squanchy/support/lang/Options.kt
|
app/src/main/java/net/squanchy/support/lang/Options.kt
|
package net.squanchy.support.lang
import arrow.core.Option
import arrow.core.getOrElse
fun <T> Option<T>.or(value: T): T {
return this.getOrElse { value }
}
fun <T> Option<T>.getOrThrow(): T {
return this.getOrElse { throw IllegalStateException("You must check if data is present before using get()") }
}
|
package net.squanchy.support.lang
import arrow.core.Option
import arrow.core.getOrElse
fun <T> Option<T>.or(value: T): T = this.getOrElse { value }
fun <T> Option<T>.getOrThrow(): T = this.getOrElse {
throw IllegalStateException("You must check if data is present before using get()")
}
fun <T> T?.option(): Option<T> = Option.fromNullable(this)
|
Add option() extension fun to create optionals from nullables
|
Add option() extension fun to create optionals from nullables
|
Kotlin
|
apache-2.0
|
squanchy-dev/squanchy-android,squanchy-dev/squanchy-android,squanchy-dev/squanchy-android
|
kotlin
|
## Code Before:
package net.squanchy.support.lang
import arrow.core.Option
import arrow.core.getOrElse
fun <T> Option<T>.or(value: T): T {
return this.getOrElse { value }
}
fun <T> Option<T>.getOrThrow(): T {
return this.getOrElse { throw IllegalStateException("You must check if data is present before using get()") }
}
## Instruction:
Add option() extension fun to create optionals from nullables
## Code After:
package net.squanchy.support.lang
import arrow.core.Option
import arrow.core.getOrElse
fun <T> Option<T>.or(value: T): T = this.getOrElse { value }
fun <T> Option<T>.getOrThrow(): T = this.getOrElse {
throw IllegalStateException("You must check if data is present before using get()")
}
fun <T> T?.option(): Option<T> = Option.fromNullable(this)
|
# ... existing code ...
import arrow.core.Option
import arrow.core.getOrElse
fun <T> Option<T>.or(value: T): T = this.getOrElse { value }
fun <T> Option<T>.getOrThrow(): T = this.getOrElse {
throw IllegalStateException("You must check if data is present before using get()")
}
fun <T> T?.option(): Option<T> = Option.fromNullable(this)
# ... rest of the code ...
|
67179d1cf8dab528bc418cdaff71446698a9bd51
|
setup.py
|
setup.py
|
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
Fix attempt: version specifier ~= is not supported on older installations of pip
|
Fix attempt: version specifier ~= is not supported on older installations of pip
|
Python
|
mit
|
Asana/python-asana,asana/python-asana,asana/python-asana
|
python
|
## Code Before:
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
## Instruction:
Fix attempt: version specifier ~= is not supported on older installations of pip
## Code After:
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
...
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
...
|
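The replacement specifiers above spell out what PEP 440's compatible-release operator means: '~=2.9.1' is shorthand for '>=2.9.1, ==2.9.*', which very old pip releases could not parse. The equivalence can be checked with the packaging library; the version numbers below are arbitrary:

from packaging.specifiers import SpecifierSet

compact = SpecifierSet("~=2.9.1")
expanded = SpecifierSet(">=2.9.1, ==2.9.*")

for version in ("2.9.1", "2.9.5", "2.10.0", "2.8.0"):
    # Both specifier sets accept and reject exactly the same versions.
    assert compact.contains(version) == expanded.contains(version)
    print(version, compact.contains(version))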
a6ae05c13666b83a1f1a8707fe21972bd1f758d9
|
walltime.py
|
walltime.py
|
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
if len(sys.argv) < 2:
print 'USAGE: walltime filename'
else:
fname = sys.argv[-1]
log_file = np.genfromtxt(fname, comments='#', delimiter=' ')
walltime_total = datetime.timedelta(seconds = log_file[:,-1].sum())
walltime_avg = datetime.timedelta(seconds = log_file[:,-1].mean())
print 'Total walltime: '
print str(walltime_total)
print 'Average walltime per step:'
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
plt.show()
|
import time
t0 = time.time()
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
t1 = time.time()
print 'Importing took {} s'.format(t1-t0)
if len(sys.argv) < 2:
print 'USAGE: walltime filename'
else:
fname = sys.argv[-1]
log_file = np.genfromtxt(fname, comments='#', delimiter=' ')
walltime_total = datetime.timedelta(seconds = log_file[:,-1].sum())
walltime_avg = datetime.timedelta(seconds = log_file[:,-1].mean())
print 'Total walltime: '
print str(walltime_total)
print 'Average walltime per step:'
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
t2 = time.time()
print 'Running took an extra {} s'.format(t2-t1)
print 'For a total of {} s'.format(t2 - t0)
plt.show()
|
Print statements added for profiling
|
Print statements added for profiling
|
Python
|
mit
|
ibackus/custom_python_packages,trquinn/custom_python_packages
|
python
|
## Code Before:
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
if len(sys.argv) < 2:
print 'USAGE: walltime filename'
else:
fname = sys.argv[-1]
log_file = np.genfromtxt(fname, comments='#', delimiter=' ')
walltime_total = datetime.timedelta(seconds = log_file[:,-1].sum())
walltime_avg = datetime.timedelta(seconds = log_file[:,-1].mean())
print 'Total walltime: '
print str(walltime_total)
print 'Average walltime per step:'
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
plt.show()
## Instruction:
Print statements added for profiling
## Code After:
import time
t0 = time.time()
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
t1 = time.time()
print 'Importing took {} s'.format(t1-t0)
if len(sys.argv) < 2:
print 'USAGE: walltime filename'
else:
fname = sys.argv[-1]
log_file = np.genfromtxt(fname, comments='#', delimiter=' ')
walltime_total = datetime.timedelta(seconds = log_file[:,-1].sum())
walltime_avg = datetime.timedelta(seconds = log_file[:,-1].mean())
print 'Total walltime: '
print str(walltime_total)
print 'Average walltime per step:'
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
t2 = time.time()
print 'Running took an extra {} s'.format(t2-t1)
print 'For a total of {} s'.format(t2 - t0)
plt.show()
|
# ... existing code ...
import time
t0 = time.time()
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
t1 = time.time()
print 'Importing took {} s'.format(t1-t0)
if len(sys.argv) < 2:
# ... modified code ...
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
t2 = time.time()
print 'Running took an extra {} s'.format(t2-t1)
print 'For a total of {} s'.format(t2 - t0)
plt.show()
# ... rest of the code ...
|
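The profiling change above brackets the imports and the main body with paired time.time() calls; a small context manager gives the same wall-clock numbers with less repetition. A sketch of that alternative (Python 3 print(), arbitrary labels, not part of the record):

import time
from contextlib import contextmanager

@contextmanager
def timed(label):
    # Measure wall-clock time for the enclosed block and report it on exit.
    start = time.time()
    try:
        yield
    finally:
        print('{} took {:.3f} s'.format(label, time.time() - start))

with timed('importing'):
    import numpy as np

with timed('computing'):
    data = np.zeros(1000)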
124ca0d847bd9c7be36389d15cbc81645dcd2bd0
|
agent/src/main/java/com/github/sulir/runtimesearch/runtime/Check.java
|
agent/src/main/java/com/github/sulir/runtimesearch/runtime/Check.java
|
package com.github.sulir.runtimesearch.runtime;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
public class Check {
public static final int PORT = 4321;
public static String searchValue;
public static void initialize() {
try {
throw new BreakpointError();
} catch (BreakpointError e) { }
}
public static void runServer() {
try {
ServerSocket server = new ServerSocket(PORT, 0, InetAddress.getLoopbackAddress());
Thread thread = new Thread(() -> {
while (true) {
try {
Socket client = server.accept();
ObjectInputStream input = new ObjectInputStream(client.getInputStream());
searchValue = (String) input.readObject();
client.close();
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
}
}
});
thread.setDaemon(true);
thread.start();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void perform(Object object) {
if (searchValue == null)
return;
if (object instanceof String) {
String string = (String) object;
if (string.contains(searchValue)) {
try {
searchValue = null;
throw new BreakpointError();
} catch (BreakpointError e) { }
}
}
}
}
|
package com.github.sulir.runtimesearch.runtime;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
public class Check {
public static final int PORT = 4321;
public static String searchValue;
public static void initialize() {
try {
throw new BreakpointError();
} catch (BreakpointError e) {
// exception thrown to trigger a breakpoint in the IDE
}
}
public static void runServer() {
try {
ServerSocket server = new ServerSocket(PORT, 0, InetAddress.getLoopbackAddress());
Thread thread = new Thread(() -> {
while (true) {
try {
Socket client = server.accept();
ObjectInputStream input = new ObjectInputStream(client.getInputStream());
searchValue = (String) input.readObject();
client.close();
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
}
}
});
thread.setDaemon(true);
thread.start();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void perform(Object object) {
if (searchValue == null)
return;
if (object instanceof String) {
String string = (String) object;
if (string.contains(searchValue)) {
try {
searchValue = null;
throw new BreakpointError();
} catch (BreakpointError e) {
// exception thrown to trigger a breakpoint in the IDE
}
}
}
}
}
|
Fix IDE warnings in the agent
|
Fix IDE warnings in the agent
|
Java
|
apache-2.0
|
sulir/runtimesearch
|
java
|
## Code Before:
package com.github.sulir.runtimesearch.runtime;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
public class Check {
public static final int PORT = 4321;
public static String searchValue;
public static void initialize() {
try {
throw new BreakpointError();
} catch (BreakpointError e) { }
}
public static void runServer() {
try {
ServerSocket server = new ServerSocket(PORT, 0, InetAddress.getLoopbackAddress());
Thread thread = new Thread(() -> {
while (true) {
try {
Socket client = server.accept();
ObjectInputStream input = new ObjectInputStream(client.getInputStream());
searchValue = (String) input.readObject();
client.close();
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
}
}
});
thread.setDaemon(true);
thread.start();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void perform(Object object) {
if (searchValue == null)
return;
if (object instanceof String) {
String string = (String) object;
if (string.contains(searchValue)) {
try {
searchValue = null;
throw new BreakpointError();
} catch (BreakpointError e) { }
}
}
}
}
## Instruction:
Fix IDE warnings in the agent
## Code After:
package com.github.sulir.runtimesearch.runtime;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
public class Check {
public static final int PORT = 4321;
public static String searchValue;
public static void initialize() {
try {
throw new BreakpointError();
} catch (BreakpointError e) {
// exception thrown to trigger a breakpoint in the IDE
}
}
public static void runServer() {
try {
ServerSocket server = new ServerSocket(PORT, 0, InetAddress.getLoopbackAddress());
Thread thread = new Thread(() -> {
while (true) {
try {
Socket client = server.accept();
ObjectInputStream input = new ObjectInputStream(client.getInputStream());
searchValue = (String) input.readObject();
client.close();
} catch (IOException | ClassNotFoundException e) {
e.printStackTrace();
}
}
});
thread.setDaemon(true);
thread.start();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void perform(Object object) {
if (searchValue == null)
return;
if (object instanceof String) {
String string = (String) object;
if (string.contains(searchValue)) {
try {
searchValue = null;
throw new BreakpointError();
} catch (BreakpointError e) {
// exception thrown to trigger a breakpoint in the IDE
}
}
}
}
}
|
# ... existing code ...
public static void initialize() {
try {
throw new BreakpointError();
} catch (BreakpointError e) {
// exception thrown to trigger a breakpoint in the IDE
}
}
public static void runServer() {
# ... modified code ...
try {
searchValue = null;
throw new BreakpointError();
} catch (BreakpointError e) {
// exception thrown to trigger a breakpoint in the IDE
}
}
}
}
# ... rest of the code ...
|
922128855ca81ca6d5cad13df91eb312e81057b9
|
GITBlob.h
|
GITBlob.h
|
//
// GITBlob.h
// CocoaGit
//
// Created by Geoffrey Garside on 29/06/2008.
// Copyright 2008 ManicPanda.com. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "GITObject.h"
@interface GITBlob : GITObject {
NSData * data;
}
#pragma mark -
#pragma mark Properties
@property(retain) NSData * data;
#pragma mark -
#pragma mark Init Methods
- (id)initWithContentsOfFile:(NSString*)filePath;
- (id)initWithData:(NSData*)dataContent;
#pragma mark -
#pragma mark Instance Methods
- (BOOL)write;
- (BOOL)writeWithError:(NSError**)errorPtr;
@end
|
//
// GITBlob.h
// CocoaGit
//
// Created by Geoffrey Garside on 29/06/2008.
// Copyright 2008 ManicPanda.com. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "GITObject.h"
@interface GITBlob : GITObject {
NSData * data;
}
#pragma mark -
#pragma mark Properties
@property(retain) NSData * data;
#pragma mark -
#pragma mark Reading existing Blob objects
- (id)initFromHash:(NSString*)objectHash;
#pragma mark -
#pragma mark Creating new Blob objects
- (id)initWithData:(NSData*)dataContent;
- (id)initWithContentsOfFile:(NSString*)filePath;
#pragma mark -
#pragma mark Instance Methods
- (BOOL)write;
- (BOOL)writeWithError:(NSError**)errorPtr;
@end
|
Add pragmas to differentiate init method types
|
Add pragmas to differentiate init method types
|
C
|
mit
|
schacon/cocoagit,schacon/cocoagit,geoffgarside/cocoagit,geoffgarside/cocoagit
|
c
|
## Code Before:
//
// GITBlob.h
// CocoaGit
//
// Created by Geoffrey Garside on 29/06/2008.
// Copyright 2008 ManicPanda.com. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "GITObject.h"
@interface GITBlob : GITObject {
NSData * data;
}
#pragma mark -
#pragma mark Properties
@property(retain) NSData * data;
#pragma mark -
#pragma mark Init Methods
- (id)initWithContentsOfFile:(NSString*)filePath;
- (id)initWithData:(NSData*)dataContent;
#pragma mark -
#pragma mark Instance Methods
- (BOOL)write;
- (BOOL)writeWithError:(NSError**)errorPtr;
@end
## Instruction:
Add pragmas to differentiate init method types
## Code After:
//
// GITBlob.h
// CocoaGit
//
// Created by Geoffrey Garside on 29/06/2008.
// Copyright 2008 ManicPanda.com. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "GITObject.h"
@interface GITBlob : GITObject {
NSData * data;
}
#pragma mark -
#pragma mark Properties
@property(retain) NSData * data;
#pragma mark -
#pragma mark Reading existing Blob objects
- (id)initFromHash:(NSString*)objectHash;
#pragma mark -
#pragma mark Creating new Blob objects
- (id)initWithData:(NSData*)dataContent;
- (id)initWithContentsOfFile:(NSString*)filePath;
#pragma mark -
#pragma mark Instance Methods
- (BOOL)write;
- (BOOL)writeWithError:(NSError**)errorPtr;
@end
|
...
@property(retain) NSData * data;
#pragma mark -
#pragma mark Reading existing Blob objects
- (id)initFromHash:(NSString*)objectHash;
#pragma mark -
#pragma mark Creating new Blob objects
- (id)initWithData:(NSData*)dataContent;
- (id)initWithContentsOfFile:(NSString*)filePath;
#pragma mark -
#pragma mark Instance Methods
...
|
eb893151d12f81f1ebe388f0b4ae650aa6f6552c
|
ticketing/__init__.py
|
ticketing/__init__.py
|
try:
VERSION = __import__('pkg_resources') \
.get_distribution('django-ticketing').version
except Exception, e:
VERSION = 'unknown'
|
VERSION = (0, 6, 0, 'final', 0)
|
Change the version string so it doesn't cause any errors.
|
Change the version string so it doesn't cause any errors.
|
Python
|
mit
|
streeter/django-ticketing
|
python
|
## Code Before:
try:
VERSION = __import__('pkg_resources') \
.get_distribution('django-ticketing').version
except Exception, e:
VERSION = 'unknown'
## Instruction:
Change the version string so it doesn't cause any errors.
## Code After:
VERSION = (0, 6, 0, 'final', 0)
|
...
VERSION = (0, 6, 0, 'final', 0)
...
|
09225071761ae059c46393d41180b6c37d1b3edc
|
portal/models/locale.py
|
portal/models/locale.py
|
from .coding import Coding
from .lazy import lazyprop
from ..system_uri import IETF_LANGUAGE_TAG
class LocaleConstants(object):
"""Attributes for built in locales
Additions may be defined in persistence files, base values defined
within for easy access and testing
"""
def __iter__(self):
for attr in dir(self):
if attr.startswith('_'):
continue
yield getattr(self, attr)
@lazyprop
def AmericanEnglish(self):
Coding(
system=IETF_LANGUAGE_TAG, code='en_US',
display='American English').add_if_not_found(True)
@lazyprop
def AustralianEnglish(self):
Coding(
system=IETF_LANGUAGE_TAG, code='en_AU',
display='Australian English').add_if_not_found(True)
|
from .coding import Coding
from .lazy import lazyprop
from ..system_uri import IETF_LANGUAGE_TAG
class LocaleConstants(object):
"""Attributes for built in locales
Additions may be defined in persistence files, base values defined
within for easy access and testing
"""
def __iter__(self):
for attr in dir(self):
if attr.startswith('_'):
continue
yield getattr(self, attr)
@lazyprop
def AmericanEnglish(self):
return Coding(
system=IETF_LANGUAGE_TAG, code='en_US',
display='American English').add_if_not_found(True)
@lazyprop
def AustralianEnglish(self):
return Coding(
system=IETF_LANGUAGE_TAG, code='en_AU',
display='Australian English').add_if_not_found(True)
|
Correct coding error - need to return coding from property function or it'll cache None.
|
Correct coding error - need to return coding from property function or it'll cache None.
|
Python
|
bsd-3-clause
|
uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal
|
python
|
## Code Before:
from .coding import Coding
from .lazy import lazyprop
from ..system_uri import IETF_LANGUAGE_TAG
class LocaleConstants(object):
"""Attributes for built in locales
Additions may be defined in persistence files, base values defined
within for easy access and testing
"""
def __iter__(self):
for attr in dir(self):
if attr.startswith('_'):
continue
yield getattr(self, attr)
@lazyprop
def AmericanEnglish(self):
Coding(
system=IETF_LANGUAGE_TAG, code='en_US',
display='American English').add_if_not_found(True)
@lazyprop
def AustralianEnglish(self):
Coding(
system=IETF_LANGUAGE_TAG, code='en_AU',
display='Australian English').add_if_not_found(True)
## Instruction:
Correct coding error - need to return coding from property function or it'll cache None.
## Code After:
from .coding import Coding
from .lazy import lazyprop
from ..system_uri import IETF_LANGUAGE_TAG
class LocaleConstants(object):
"""Attributes for built in locales
Additions may be defined in persistence files, base values defined
within for easy access and testing
"""
def __iter__(self):
for attr in dir(self):
if attr.startswith('_'):
continue
yield getattr(self, attr)
@lazyprop
def AmericanEnglish(self):
return Coding(
system=IETF_LANGUAGE_TAG, code='en_US',
display='American English').add_if_not_found(True)
@lazyprop
def AustralianEnglish(self):
return Coding(
system=IETF_LANGUAGE_TAG, code='en_AU',
display='Australian English').add_if_not_found(True)
|
// ... existing code ...
within for easy access and testing
"""
def __iter__(self):
for attr in dir(self):
if attr.startswith('_'):
// ... modified code ...
@lazyprop
def AmericanEnglish(self):
return Coding(
system=IETF_LANGUAGE_TAG, code='en_US',
display='American English').add_if_not_found(True)
@lazyprop
def AustralianEnglish(self):
return Coding(
system=IETF_LANGUAGE_TAG, code='en_AU',
display='Australian English').add_if_not_found(True)
// ... rest of the code ...
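A side note on why the missing return cached None: a lazy property decorator typically stores whatever the wrapped function returns on first access. The project's real decorator lives in portal.models.lazy and is not shown in this record; the sketch below is only an assumed approximation of that behaviour.
import functools
def lazyprop(fn):
    # Hypothetical stand-in for the .lazy helper used above.
    attr = '_lazy_' + fn.__name__
    @property
    @functools.wraps(fn)
    def wrapper(self):
        if not hasattr(self, attr):
            setattr(self, attr, fn(self))  # a missing return in fn caches None here
        return getattr(self, attr)
    return wrapper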
|
72f23c104a28fe4c91d5d36d3f939e110c6f16e3
|
exercises/chapter_04/exercise_04_01/exercise_04_01.py
|
exercises/chapter_04/exercise_04_01/exercise_04_01.py
|
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
|
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
Add final version of exercise 4.1.
|
Add final version of exercise 4.1.
|
Python
|
mit
|
HenrikSamuelsson/python-crash-course
|
python
|
## Code Before:
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
## Instruction:
Add final version of exercise 4.1.
## Code After:
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
...
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
...
|
f894aff53577fb459bfac1802f3880133e4143cf
|
build/build.py
|
build/build.py
|
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
|
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
|
Add classes to run ./configure
|
Add classes to run ./configure
|
Python
|
apache-2.0
|
fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary
|
python
|
## Code Before:
import os
import shutil
import util
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
## Instruction:
Add classes to run ./configure
## Code After:
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
def doBuild(self, dir):
os.system("cd %s; make" % dir)
class MakeInstall:
def doInstall(self, dir, root):
os.system("cd %s; make %s=%s install" % (dir, self.rootVar, root))
def __init__(self, rootVar = "DESTDIR"):
self.rootVar = rootVar
class InstallFile:
def doInstall(self, dir, root):
dest = root + self.toFile
util.mkdirChain(os.path.dirname(dest))
shutil.copyfile(self.toFile, dest)
os.chmod(dest, self.mode)
def __init__(self, fromFile, toFile, perms = 0644):
self.toFile = toFile
self.file = fromFile
self.mode = perms
|
# ... existing code ...
import os
import shutil
import util
class ManualConfigure:
def doBuild(self, dir):
os.system("cd %s; ./configure %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Configure:
def doBuild(self, dir):
os.system("cd %s; ./configure --prefix=/usr --sysconfdir=/etc %s" % (dir, self.extraflags))
def __init__(self, extraflags=""):
self.extraflags = extraflags
class Make:
# ... rest of the code ...
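A hedged sketch of how these step classes could be chained by a build recipe. Only the class names and their doBuild/doInstall signatures come from the record; the build_and_install driver and the flag value below are assumptions.
# Hypothetical driver combining the step objects defined above.
build_steps = [Configure(extraflags="--disable-docs"), Make()]
install_steps = [MakeInstall()]
def build_and_install(src_dir, dest_root):
    for step in build_steps:
        step.doBuild(src_dir)
    for step in install_steps:
        step.doInstall(src_dir, dest_root)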
|
c379a9922b8e26c90034d02b3d13c6f8ab5c63ee
|
src/main/java/be/yildiz/shared/entity/EntityCreator.java
|
src/main/java/be/yildiz/shared/entity/EntityCreator.java
|
/*
* This file is part of the Yildiz-Engine project, licenced under the MIT License (MIT)
*
* Copyright (c) 2017 Grégory Van den Borre
*
* More infos available: https://www.yildiz-games.be
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package be.yildiz.shared.entity;
/**
* @author Grégory Van den Borre
*/
public interface EntityCreator<T extends Entity> {
T create(EntityToCreate e);
}
|
/*
* This file is part of the Yildiz-Engine project, licenced under the MIT License (MIT)
*
* Copyright (c) 2017 Grégory Van den Borre
*
* More infos available: https://www.yildiz-games.be
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package be.yildiz.shared.entity;
/**
* @author Grégory Van den Borre
*/
@FunctionalInterface
public interface EntityCreator<T extends Entity> {
T create(EntityToCreate e);
}
|
Fix sonar: add functional interface.
|
[YIL-83] Fix sonar: add functional interface.
|
Java
|
mit
|
yildiz-online/engine-shared
|
java
|
## Code Before:
/*
* This file is part of the Yildiz-Engine project, licenced under the MIT License (MIT)
*
* Copyright (c) 2017 Grégory Van den Borre
*
* More infos available: https://www.yildiz-games.be
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package be.yildiz.shared.entity;
/**
* @author Grégory Van den Borre
*/
public interface EntityCreator<T extends Entity> {
T create(EntityToCreate e);
}
## Instruction:
[YIL-83] Fix sonar: add functional interface.
## Code After:
/*
* This file is part of the Yildiz-Engine project, licenced under the MIT License (MIT)
*
* Copyright (c) 2017 Grégory Van den Borre
*
* More infos available: https://www.yildiz-games.be
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
* OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package be.yildiz.shared.entity;
/**
* @author Grégory Van den Borre
*/
@FunctionalInterface
public interface EntityCreator<T extends Entity> {
T create(EntityToCreate e);
}
|
// ... existing code ...
/**
* @author Grégory Van den Borre
*/
@FunctionalInterface
public interface EntityCreator<T extends Entity> {
T create(EntityToCreate e);
// ... rest of the code ...
|
e5a397033c5720cd7d0ab321c05a8f1d12f4dc99
|
tm/tmux_wrapper.py
|
tm/tmux_wrapper.py
|
import subprocess
class SessionExists(Exception):
description = "Session already exists."
pass
class ServerConnectionError(Exception):
description = "tmux server is not currently running."
pass
class SessionDoesNotExist(Exception):
description = "Session does not exist."
pass
def command(command):
p = subprocess.Popen("tmux " + command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
return p.communicate()
def kill(session):
p = subprocess.Popen("tmux kill-session -t {}".format(session),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "session not found" in err:
raise SessionDoesNotExist(session)
if "failed to connect to server" in err:
raise ServerConnectionError()
def list():
p = subprocess.Popen("tmux ls",
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "failed to connect to server" in err:
raise ServerConnectionError()
return out
def create(session):
p = subprocess.Popen("tmux new -s {}".format(session),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "duplicate session" in err:
raise SessionExists(session)
def attach(session):
p = subprocess.Popen("tmux attach-session -t {}".format(session),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "no sessions" in err:
raise SessionDoesNotExist(session)
def create_or_attach(session):
create(session)
except SessionExists:
attach(session)
|
import subprocess
class SessionExists(Exception):
description = "Session already exists."
pass
class ServerConnectionError(Exception):
description = "tmux server is not currently running."
pass
class SessionDoesNotExist(Exception):
description = "Session does not exist."
pass
def command(command):
p = subprocess.Popen("tmux " + command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
return p.communicate()
def kill(session):
out, err = command("kill-session -t {}".format(session))
if "session not found" in err:
raise SessionDoesNotExist(session)
if "failed to connect to server" in err:
raise ServerConnectionError()
def list():
out, err = command("ls")
if "failed to connect to server" in err:
raise ServerConnectionError()
return out
def create(session):
out, err = command("new -s {}".format(session))
if "duplicate session" in err:
raise SessionExists(session)
def attach(session):
out, err = command("attach-session -t {}".format(session))
if "no sessions" in err:
raise SessionDoesNotExist(session)
def create_or_attach(session):
try:
create(session)
except SessionExists:
attach(session)
|
Use raw command method to run all commands in wrapper
|
Use raw command method to run all commands in wrapper
|
Python
|
mit
|
ethanal/tm
|
python
|
## Code Before:
import subprocess
class SessionExists(Exception):
description = "Session already exists."
pass
class ServerConnectionError(Exception):
description = "tmux server is not currently running."
pass
class SessionDoesNotExist(Exception):
description = "Session does not exist."
pass
def command(command):
p = subprocess.Popen("tmux " + command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
return p.communicate()
def kill(session):
p = subprocess.Popen("tmux kill-session -t {}".format(session),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "session not found" in err:
raise SessionDoesNotExist(session)
if "failed to connect to server" in err:
raise ServerConnectionError()
def list():
p = subprocess.Popen("tmux ls",
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "failed to connect to server" in err:
raise ServerConnectionError()
return out
def create(session):
p = subprocess.Popen("tmux new -s {}".format(session),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "duplicate session" in err:
raise SessionExists(session)
def attach(session):
p = subprocess.Popen("tmux attach-session -t {}".format(session),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
if "no sessions" in err:
raise SessionDoesNotExist(session)
def create_or_attach(session):
create(session)
except SessionExists:
attach(session)
## Instruction:
Use raw command method to run all commands in wrapper
## Code After:
import subprocess
class SessionExists(Exception):
description = "Session already exists."
pass
class ServerConnectionError(Exception):
description = "tmux server is not currently running."
pass
class SessionDoesNotExist(Exception):
description = "Session does not exist."
pass
def command(command):
p = subprocess.Popen("tmux " + command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
return p.communicate()
def kill(session):
out, err = command("kill-session -t {}".format(session))
if "session not found" in err:
raise SessionDoesNotExist(session)
if "failed to connect to server" in err:
raise ServerConnectionError()
def list():
out, err = command("ls")
if "failed to connect to server" in err:
raise ServerConnectionError()
return out
def create(session):
out, err = command("new -s {}".format(session))
if "duplicate session" in err:
raise SessionExists(session)
def attach(session):
out, err = command("attach-session -t {}".format(session))
if "no sessions" in err:
raise SessionDoesNotExist(session)
def create_or_attach(session):
try:
create(session)
except SessionExists:
attach(session)
|
// ... existing code ...
return p.communicate()
def kill(session):
out, err = command("kill-session -t {}".format(session))
if "session not found" in err:
raise SessionDoesNotExist(session)
// ... modified code ...
def list():
out, err = command("ls")
if "failed to connect to server" in err:
raise ServerConnectionError()
...
return out
def create(session):
out, err = command("new -s {}".format(session))
if "duplicate session" in err:
raise SessionExists(session)
...
def attach(session):
out, err = command("attach-session -t {}".format(session))
if "no sessions" in err:
raise SessionDoesNotExist(session)
...
def create_or_attach(session):
try:
create(session)
except SessionExists:
attach(session)
// ... rest of the code ...
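One caveat when reusing this wrapper: subprocess pipes return bytes on Python 3, so the substring checks on err assume Python 2 or a text-mode Popen. A small usage sketch follows; the import path is assumed from the tm package layout.
from tm import tmux_wrapper as tmux  # assumed import path
tmux.create_or_attach("work")        # new session, or attach if it already exists
try:
    tmux.kill("scratch")
except tmux.SessionDoesNotExist as exc:
    print(exc.description)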
|
703cdca6725438b55bf544962ce0c554598697be
|
shoop/admin/templatetags/shoop_admin.py
|
shoop/admin/templatetags/shoop_admin.py
|
from bootstrap3.renderers import FormRenderer
from django.utils.safestring import mark_safe
from django_jinja import library
from shoop.admin.template_helpers import shoop_admin as shoop_admin_template_helpers
from shoop.admin.utils.bs3_renderers import AdminFieldRenderer
class Bootstrap3Namespace(object):
def field(self, field, **kwargs):
if not field:
return ""
return mark_safe(AdminFieldRenderer(field, **kwargs).render())
def form(self, form, **kwargs):
return mark_safe(FormRenderer(form, **kwargs).render())
library.global_function(name="shoop_admin", fn=shoop_admin_template_helpers)
library.global_function(name="bs3", fn=Bootstrap3Namespace())
|
from bootstrap3.renderers import FormRenderer
from django.utils.safestring import mark_safe
from django_jinja import library
from shoop.admin.template_helpers import shoop_admin as shoop_admin_template_helpers
from shoop.admin.utils.bs3_renderers import AdminFieldRenderer
class Bootstrap3Namespace(object):
def field(self, field, **kwargs):
if not field:
return ""
return mark_safe(AdminFieldRenderer(field, **kwargs).render())
def form(self, form, **kwargs):
return mark_safe(FormRenderer(form, **kwargs).render())
def datetime_field(self, field, **kwargs):
kwargs.setdefault("widget_class", "datetime")
kwargs.setdefault("addon_after", "<span class='fa fa-calendar'></span>")
return self.field(field, **kwargs)
library.global_function(name="shoop_admin", fn=shoop_admin_template_helpers)
library.global_function(name="bs3", fn=Bootstrap3Namespace())
|
Add template helper for datetime fields
|
Admin: Add template helper for datetime fields
Refs SHOOP-1612
|
Python
|
agpl-3.0
|
suutari-ai/shoop,shoopio/shoop,suutari/shoop,jorge-marques/shoop,hrayr-artunyan/shuup,taedori81/shoop,shawnadelic/shuup,shawnadelic/shuup,shawnadelic/shuup,suutari-ai/shoop,shoopio/shoop,taedori81/shoop,akx/shoop,suutari/shoop,akx/shoop,shoopio/shoop,hrayr-artunyan/shuup,suutari-ai/shoop,suutari/shoop,hrayr-artunyan/shuup,jorge-marques/shoop,jorge-marques/shoop,taedori81/shoop,akx/shoop
|
python
|
## Code Before:
from bootstrap3.renderers import FormRenderer
from django.utils.safestring import mark_safe
from django_jinja import library
from shoop.admin.template_helpers import shoop_admin as shoop_admin_template_helpers
from shoop.admin.utils.bs3_renderers import AdminFieldRenderer
class Bootstrap3Namespace(object):
def field(self, field, **kwargs):
if not field:
return ""
return mark_safe(AdminFieldRenderer(field, **kwargs).render())
def form(self, form, **kwargs):
return mark_safe(FormRenderer(form, **kwargs).render())
library.global_function(name="shoop_admin", fn=shoop_admin_template_helpers)
library.global_function(name="bs3", fn=Bootstrap3Namespace())
## Instruction:
Admin: Add template helper for datetime fields
Refs SHOOP-1612
## Code After:
from bootstrap3.renderers import FormRenderer
from django.utils.safestring import mark_safe
from django_jinja import library
from shoop.admin.template_helpers import shoop_admin as shoop_admin_template_helpers
from shoop.admin.utils.bs3_renderers import AdminFieldRenderer
class Bootstrap3Namespace(object):
def field(self, field, **kwargs):
if not field:
return ""
return mark_safe(AdminFieldRenderer(field, **kwargs).render())
def form(self, form, **kwargs):
return mark_safe(FormRenderer(form, **kwargs).render())
def datetime_field(self, field, **kwargs):
kwargs.setdefault("widget_class", "datetime")
kwargs.setdefault("addon_after", "<span class='fa fa-calendar'></span>")
return self.field(field, **kwargs)
library.global_function(name="shoop_admin", fn=shoop_admin_template_helpers)
library.global_function(name="bs3", fn=Bootstrap3Namespace())
|
# ... existing code ...
def form(self, form, **kwargs):
return mark_safe(FormRenderer(form, **kwargs).render())
def datetime_field(self, field, **kwargs):
kwargs.setdefault("widget_class", "datetime")
kwargs.setdefault("addon_after", "<span class='fa fa-calendar'></span>")
return self.field(field, **kwargs)
library.global_function(name="shoop_admin", fn=shoop_admin_template_helpers)
library.global_function(name="bs3", fn=Bootstrap3Namespace())
# ... rest of the code ...
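Since bs3 is registered as a Jinja2 global, the helper is intended to be called from admin templates as {{ bs3.datetime_field(form.my_field) }}. A rough Python-level equivalent, where the form and field name are made up, would be:
# Hypothetical direct call mirroring the template usage.
ns = Bootstrap3Namespace()
html = ns.datetime_field(my_form["starts_at"])  # my_form is an assumed Django form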
|
940d206c46916b1cb365acc70dc0081ef879e18d
|
src/main/java/org/javarosa/xpath/expr/XPathCountFunc.java
|
src/main/java/org/javarosa/xpath/expr/XPathCountFunc.java
|
package org.javarosa.xpath.expr;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.DataInstance;
import org.javarosa.xpath.XPathNodeset;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.parser.XPathSyntaxException;
public class XPathCountFunc extends XPathFuncExpr {
public static final String NAME = "count";
private static final int EXPECTED_ARG_COUNT = 1;
public XPathCountFunc() {
name = NAME;
expectedArgCount = EXPECTED_ARG_COUNT;
}
public XPathCountFunc(XPathExpression[] args) throws XPathSyntaxException {
super(NAME, args, EXPECTED_ARG_COUNT, true);
}
@Override
public Object evalBody(DataInstance model, EvaluationContext evalContext, Object[] evaluatedArgs) {
if (evaluatedArgs[0] instanceof XPathNodeset) {
return new Double(((XPathNodeset)evaluatedArgs[0]).size());
} else {
throw new XPathTypeMismatchException("not a nodeset");
}
}
}
|
package org.javarosa.xpath.expr;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.DataInstance;
import org.javarosa.xpath.XPathNodeset;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.parser.XPathSyntaxException;
public class XPathCountFunc extends XPathFuncExpr {
public static final String NAME = "count";
private static final int EXPECTED_ARG_COUNT = 1;
public XPathCountFunc() {
name = NAME;
expectedArgCount = EXPECTED_ARG_COUNT;
}
public XPathCountFunc(XPathExpression[] args) throws XPathSyntaxException {
super(NAME, args, EXPECTED_ARG_COUNT, true);
}
@Override
public Object evalBody(DataInstance model, EvaluationContext evalContext, Object[] evaluatedArgs) {
if (evaluatedArgs[0] instanceof XPathNodeset) {
return new Double(((XPathNodeset)evaluatedArgs[0]).size());
} else {
throw new XPathTypeMismatchException("uses an invalid reference inside a count function");
}
}
}
|
Update error text to be more descriptive
|
Update error text to be more descriptive
|
Java
|
apache-2.0
|
dimagi/commcare-core,dimagi/commcare,dimagi/commcare-core,dimagi/commcare,dimagi/commcare-core,dimagi/commcare
|
java
|
## Code Before:
package org.javarosa.xpath.expr;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.DataInstance;
import org.javarosa.xpath.XPathNodeset;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.parser.XPathSyntaxException;
public class XPathCountFunc extends XPathFuncExpr {
public static final String NAME = "count";
private static final int EXPECTED_ARG_COUNT = 1;
public XPathCountFunc() {
name = NAME;
expectedArgCount = EXPECTED_ARG_COUNT;
}
public XPathCountFunc(XPathExpression[] args) throws XPathSyntaxException {
super(NAME, args, EXPECTED_ARG_COUNT, true);
}
@Override
public Object evalBody(DataInstance model, EvaluationContext evalContext, Object[] evaluatedArgs) {
if (evaluatedArgs[0] instanceof XPathNodeset) {
return new Double(((XPathNodeset)evaluatedArgs[0]).size());
} else {
throw new XPathTypeMismatchException("not a nodeset");
}
}
}
## Instruction:
Update error text to be more descriptive
## Code After:
package org.javarosa.xpath.expr;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.DataInstance;
import org.javarosa.xpath.XPathNodeset;
import org.javarosa.xpath.XPathTypeMismatchException;
import org.javarosa.xpath.parser.XPathSyntaxException;
public class XPathCountFunc extends XPathFuncExpr {
public static final String NAME = "count";
private static final int EXPECTED_ARG_COUNT = 1;
public XPathCountFunc() {
name = NAME;
expectedArgCount = EXPECTED_ARG_COUNT;
}
public XPathCountFunc(XPathExpression[] args) throws XPathSyntaxException {
super(NAME, args, EXPECTED_ARG_COUNT, true);
}
@Override
public Object evalBody(DataInstance model, EvaluationContext evalContext, Object[] evaluatedArgs) {
if (evaluatedArgs[0] instanceof XPathNodeset) {
return new Double(((XPathNodeset)evaluatedArgs[0]).size());
} else {
throw new XPathTypeMismatchException("uses an invalid reference inside a count function");
}
}
}
|
# ... existing code ...
if (evaluatedArgs[0] instanceof XPathNodeset) {
return new Double(((XPathNodeset)evaluatedArgs[0]).size());
} else {
throw new XPathTypeMismatchException("uses an invalid reference inside a count function");
}
}
# ... rest of the code ...
|
bcaa91b14cd852b88c348aa47ab97b6dc8cde42c
|
knesset/browser_cases.py
|
knesset/browser_cases.py
|
from knesset.browser_test_case import BrowserTestCase, on_platforms
# All browser test cases must inherit from BrowserTestCase which initializes the selenium framework
# also, they must use the @on_platforms decorator. This decorator can run the test case several times - for different browser and platforms.
@on_platforms()
class MyTestCase(BrowserTestCase):
"""
Simple demo test case - just makes sure the tidbit carousel appears on the homepage
"""
def testHomepage(self):
# inside the tests you can use self.drive which will have a ready selenium driver to use
self.driver.get(self.live_server_url+'/')
# most functions throw an exception if they don't find what their looking for, so you don't have to assert
self.driver.find_element_by_id('tidbitCarousel')
|
from knesset.browser_test_case import BrowserTestCase, on_platforms
# All browser test cases must inherit from BrowserTestCase which initializes the selenium framework
# also, they must use the @on_platforms decorator. This decorator can run the test case several times - for different browser and platforms.
@on_platforms()
class MainSIteBrowserTestCase(BrowserTestCase):
"""
Simple demo test case - just makes sure the tidbit carousel appears on the homepage
"""
def testHomepage(self):
# inside the tests you can use self.drive which will have a ready selenium driver to use
self.driver.get(self.live_server_url+'/main') # Until we return old page
# most functions throw an exception if they don't find what their looking for, so you don't have to assert
self.driver.find_element_by_id('tidbitCarousel')
def testHelpPageDisplayFacebookUpdates(self):
self.driver.get(self.live_server_url + '/help') # Until we return old page
self.driver.find_element_by_id('kikar-facebook-updates-ul')
|
Update test case for current state
|
Update test case for current state
|
Python
|
bsd-3-clause
|
MeirKriheli/Open-Knesset,OriHoch/Open-Knesset,OriHoch/Open-Knesset,MeirKriheli/Open-Knesset,alonisser/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,daonb/Open-Knesset,MeirKriheli/Open-Knesset,OriHoch/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,OriHoch/Open-Knesset,daonb/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset
|
python
|
## Code Before:
from knesset.browser_test_case import BrowserTestCase, on_platforms
# All browser test cases must inherit from BrowserTestCase which initializes the selenium framework
# also, they must use the @on_platforms decorator. This decorator can run the test case several times - for different browser and platforms.
@on_platforms()
class MyTestCase(BrowserTestCase):
"""
Simple demo test case - just makes sure the tidbit carousel appears on the homepage
"""
def testHomepage(self):
# inside the tests you can use self.drive which will have a ready selenium driver to use
self.driver.get(self.live_server_url+'/')
# most functions throw an exception if they don't find what their looking for, so you don't have to assert
self.driver.find_element_by_id('tidbitCarousel')
## Instruction:
Update test case for current state
## Code After:
from knesset.browser_test_case import BrowserTestCase, on_platforms
# All browser test cases must inherit from BrowserTestCase which initializes the selenium framework
# also, they must use the @on_platforms decorator. This decorator can run the test case several times - for different browser and platforms.
@on_platforms()
class MainSIteBrowserTestCase(BrowserTestCase):
"""
Simple demo test case - just makes sure the tidbit carousel appears on the homepage
"""
def testHomepage(self):
# inside the tests you can use self.drive which will have a ready selenium driver to use
self.driver.get(self.live_server_url+'/main') # Until we return old page
# most functions throw an exception if they don't find what their looking for, so you don't have to assert
self.driver.find_element_by_id('tidbitCarousel')
def testHelpPageDisplayFacebookUpdates(self):
self.driver.get(self.live_server_url + '/help') # Until we return old page
self.driver.find_element_by_id('kikar-facebook-updates-ul')
|
// ... existing code ...
# also, they must use the @on_platforms decorator. This decorator can run the test case several times - for different browser and platforms.
@on_platforms()
class MainSIteBrowserTestCase(BrowserTestCase):
"""
Simple demo test case - just makes sure the tidbit carousel appears on the homepage
"""
// ... modified code ...
def testHomepage(self):
# inside the tests you can use self.drive which will have a ready selenium driver to use
self.driver.get(self.live_server_url+'/main') # Until we return old page
# most functions throw an exception if they don't find what their looking for, so you don't have to assert
self.driver.find_element_by_id('tidbitCarousel')
def testHelpPageDisplayFacebookUpdates(self):
self.driver.get(self.live_server_url + '/help') # Until we return old page
self.driver.find_element_by_id('kikar-facebook-updates-ul')
// ... rest of the code ...
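find_element_by_id raises NoSuchElementException as soon as the element is missing, so these checks can be flaky if the page renders slowly. If that becomes a problem, an explicit wait is the usual remedy; this is only a sketch, not part of the commit.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
WebDriverWait(self.driver, 10).until(
    EC.presence_of_element_located((By.ID, "kikar-facebook-updates-ul")))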
|
e094def7ae5f7b59ef630c8952235782795e7803
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='Weitersager',
version='0.1',
description='A proxy to forward messages received via HTTP to to IRC',
author='Jochen Kupperschmidt',
author_email='[email protected]',
url='http://homework.nwsnet.de/',
)
|
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='Weitersager',
version='0.1',
description='A proxy to forward messages received via HTTP to to IRC',
long_description=long_description,
author='Jochen Kupperschmidt',
author_email='[email protected]',
url='http://homework.nwsnet.de/',
)
|
Include README as long description.
|
Include README as long description.
|
Python
|
mit
|
homeworkprod/weitersager
|
python
|
## Code Before:
from setuptools import setup
setup(
name='Weitersager',
version='0.1',
description='A proxy to forward messages received via HTTP to to IRC',
author='Jochen Kupperschmidt',
author_email='[email protected]',
url='http://homework.nwsnet.de/',
)
## Instruction:
Include README as long description.
## Code After:
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='Weitersager',
version='0.1',
description='A proxy to forward messages received via HTTP to to IRC',
long_description=long_description,
author='Jochen Kupperschmidt',
author_email='[email protected]',
url='http://homework.nwsnet.de/',
)
|
...
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
...
name='Weitersager',
version='0.1',
description='A proxy to forward messages received via HTTP to to IRC',
long_description=long_description,
author='Jochen Kupperschmidt',
author_email='[email protected]',
url='http://homework.nwsnet.de/',
...
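The same read can be done without the codecs module via io.open, which accepts an encoding argument on both Python 2 and 3; shown only as an alternative sketch, not what the commit used.
import io
with io.open('README.rst', encoding='utf-8') as f:
    long_description = f.read()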
|
f13f14b134d76acac9cad8a93b47315fb0df1ba9
|
utils/stepvals.py
|
utils/stepvals.py
|
import math
def get_range(val, step):
stepvals = [i*step for i in xrange(int(math.ceil(val/step)))][1:]
if not stepvals[-1] == val: # if last element isn't the actual value
stepvals += [val] # add it in
return stepvals
|
import math
def get_range(val, step):
if args.step >= val:
raise Exception("Step value is too large! Must be smaller than value.")
stepvals = [i*step for i in xrange(int(math.ceil(val/step)))][1:]
if not stepvals[-1] == val: # if last element isn't the actual value
stepvals += [val] # add it in
return stepvals
|
Raise exception if step value is invalid.
|
Raise exception if step value is invalid.
|
Python
|
mit
|
wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation
|
python
|
## Code Before:
import math
def get_range(val, step):
stepvals = [i*step for i in xrange(int(math.ceil(val/step)))][1:]
if not stepvals[-1] == val: # if last element isn't the actual value
stepvals += [val] # add it in
return stepvals
## Instruction:
Raise exception if step value is invalid.
## Code After:
import math
def get_range(val, step):
if args.step >= val:
raise Exception("Step value is too large! Must be smaller than value.")
stepvals = [i*step for i in xrange(int(math.ceil(val/step)))][1:]
if not stepvals[-1] == val: # if last element isn't the actual value
stepvals += [val] # add it in
return stepvals
|
// ... existing code ...
import math
def get_range(val, step):
if args.step >= val:
raise Exception("Step value is too large! Must be smaller than value.")
stepvals = [i*step for i in xrange(int(math.ceil(val/step)))][1:]
if not stepvals[-1] == val: # if last element isn't the actual value
stepvals += [val] # add it in
// ... rest of the code ...
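Worth flagging for anyone reusing this snippet: the guard added here tests args.step, but the function only receives step, so it raises NameError unless an args object happens to exist at module scope. The presumed intent looks like this sketch; the rest of the function is unchanged.
def get_range(val, step):
    if step >= val:  # compare the parameter itself, not an undefined "args"
        raise Exception("Step value is too large! Must be smaller than value.")
    # ... remainder as in the record ...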
|
6503450cdd1ebfb9e2fa874de6676bbf7531c5b1
|
src/me/dreamteam/tardis/Game.java
|
src/me/dreamteam/tardis/Game.java
|
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.image.BufferStrategy;
/**
Main Class
*/
public class Game extends Canvas {
/**
* Begin the game parameters that will allow us to define certain elements.
*/
private BufferStrategy strategy;
// This provides hardware acceleration
private boolean isRunning = true;
// Is the game running or not?
}
|
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.Dimension;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.image.BufferStrategy;
import javax.swing.JFrame;
import javax.swing.JPanel;
/**
Main Class
*/
public class Game extends Canvas {
/**
* Begin the game parameters that will allow us to define certain elements.
*/
private BufferStrategy strategy;
// This provides hardware acceleration
private boolean isRunning = true;
// Is the game running or not?
private String gameName = "Codename TARDIS ";
private String build = "Alpha ";
private String version = "0.1";
// Version set up so that we can see where we are at
public Game() {
// create a frame to contain our game
JFrame container = new JFrame(gameName + "- " + build + version);
// get hold the content of the frame and set up the resolution of the game
JPanel panel = (JPanel) container.getContentPane();
panel.setPreferredSize(new Dimension(500,650));
// Katie feel free to change this to the dimensions as given in the photoshop document
panel.setLayout(null);
// setup our canvas size and put it into the content of the frame
setBounds(0,0,500,650);
panel.add(this);
// Tell AWT not to bother repainting our canvas since we're
// going to do that our self in accelerated mode
setIgnoreRepaint(true);
// finally make the window visible
container.pack();
container.setResizable(false);
container.setVisible(true);
// add a listener to respond to the user closing the window. If they
// do we'd like to exit the game
container.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
}
/**
* Garbage collection and looping
*/
private void startGame() {
}
public void gameLoop() {
long lastLoopTime = System.currentTimeMillis();
while (isRunning) {
long delta = System.currentTimeMillis() - lastLoopTime;
lastLoopTime = System.currentTimeMillis();
}
}
/**
* Game Start
*/
public static void main(String argv[]) {
Game g =new Game();
// Start the main game loop
g.gameLoop();
}
}
|
Build the canvas, add more params, game loop, main!!!
|
Build the canvas, add more params, game loop, main!!!
|
Java
|
mit
|
The-Dream-Team/Tardis,The-Dream-Team/Tardis
|
java
|
## Code Before:
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.image.BufferStrategy;
/**
Main Class
*/
public class Game extends Canvas {
/**
* Begin the game parameters that will allow us to define certain elements.
*/
private BufferStrategy strategy;
// This provides hardware acceleration
private boolean isRunning = true;
// Is the game running or not?
}
## Instruction:
Build the canvas, add more params, game loop, main!!!
## Code After:
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.Dimension;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.image.BufferStrategy;
import javax.swing.JFrame;
import javax.swing.JPanel;
/**
Main Class
*/
public class Game extends Canvas {
/**
* Begin the game parameters that will allow us to define certain elements.
*/
private BufferStrategy strategy;
// This provides hardware acceleration
private boolean isRunning = true;
// Is the game running or not?
private String gameName = "Codename TARDIS ";
private String build = "Alpha ";
private String version = "0.1";
// Version set up so that we can see where we are at
public Game() {
// create a frame to contain our game
JFrame container = new JFrame(gameName + "- " + build + version);
// get hold the content of the frame and set up the resolution of the game
JPanel panel = (JPanel) container.getContentPane();
panel.setPreferredSize(new Dimension(500,650));
// Katie feel free to change this to the dimensions as given in the photoshop document
panel.setLayout(null);
// setup our canvas size and put it into the content of the frame
setBounds(0,0,500,650);
panel.add(this);
// Tell AWT not to bother repainting our canvas since we're
// going to do that our self in accelerated mode
setIgnoreRepaint(true);
// finally make the window visible
container.pack();
container.setResizable(false);
container.setVisible(true);
// add a listener to respond to the user closing the window. If they
// do we'd like to exit the game
container.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
}
/**
* Garbage collection and looping
*/
private void startGame() {
}
public void gameLoop() {
long lastLoopTime = System.currentTimeMillis();
while (isRunning) {
long delta = System.currentTimeMillis() - lastLoopTime;
lastLoopTime = System.currentTimeMillis();
}
}
/**
* Game Start
*/
public static void main(String argv[]) {
Game g =new Game();
// Start the main game loop
g.gameLoop();
}
}
|
...
package me.dreamteam.tardis;
import java.awt.Canvas;
import java.awt.Dimension;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.image.BufferStrategy;
import javax.swing.JFrame;
import javax.swing.JPanel;
/**
Main Class
...
private boolean isRunning = true;
// Is the game running or not?
private String gameName = "Codename TARDIS ";
private String build = "Alpha ";
private String version = "0.1";
// Version set up so that we can see where we are at
public Game() {
// create a frame to contain our game
JFrame container = new JFrame(gameName + "- " + build + version);
// get hold the content of the frame and set up the resolution of the game
JPanel panel = (JPanel) container.getContentPane();
panel.setPreferredSize(new Dimension(500,650));
// Katie feel free to change this to the dimensions as given in the photoshop document
panel.setLayout(null);
// setup our canvas size and put it into the content of the frame
setBounds(0,0,500,650);
panel.add(this);
// Tell AWT not to bother repainting our canvas since we're
// going to do that our self in accelerated mode
setIgnoreRepaint(true);
// finally make the window visible
container.pack();
container.setResizable(false);
container.setVisible(true);
// add a listener to respond to the user closing the window. If they
// do we'd like to exit the game
container.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
}
/**
* Garbage collection and looping
*/
private void startGame() {
}
public void gameLoop() {
long lastLoopTime = System.currentTimeMillis();
while (isRunning) {
long delta = System.currentTimeMillis() - lastLoopTime;
lastLoopTime = System.currentTimeMillis();
}
}
/**
* Game Start
*/
public static void main(String argv[]) {
Game g =new Game();
// Start the main game loop
g.gameLoop();
}
}
...
|
25ff50839e50a46b4e973acf0a6ae28472a71473
|
wait-for-statuses.py
|
wait-for-statuses.py
|
import urllib.request
import json
import subprocess
import time
import os
# We're limited to this number by GH Actions API
# https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#list-jobs-for-a-workflow-run
max_jobs=100
status_url = "https://api.github.com/repos/" \
+ os.environ['GITHUB_REPOSITORY'] \
+ "/actions/runs/" \
+ os.environ['GITHUB_RUN_ID'] \
+ "/jobs" \
+ "?per_page=" + str(max_jobs)
numOfJobs = int(os.environ['NUM_OF_JOBS'])
while(True):
time.sleep(60)
countCompleted = 0
with urllib.request.urlopen(status_url) as url:
data = json.loads(url.read().decode())
for j in data["jobs"]:
if(j["status"] == "completed"):
countCompleted += 1
print("Completed jobs:" + str(countCompleted) + ". Jobs overall: " + str(numOfJobs))
if(countCompleted >= numOfJobs):
break
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/master-package.sh")
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/cleanup-anaconda.sh")
|
import urllib.request
import json
import subprocess
import time
import os
import sys
# We're limited to this number by GH Actions API
# https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#list-jobs-for-a-workflow-run
max_jobs=100
status_url = "https://api.github.com/repos/" \
+ os.environ['GITHUB_REPOSITORY'] \
+ "/actions/runs/" \
+ os.environ['GITHUB_RUN_ID'] \
+ "/jobs" \
+ "?per_page=" + str(max_jobs)
numOfJobs = int(os.environ['NUM_OF_JOBS'])
if(numOfJobs > max_jobs):
sys.exit("ERROR: number of jobs exceeded max_jobs: " + str(max_jobs))
while(True):
time.sleep(60)
countCompleted = 0
with urllib.request.urlopen(status_url) as url:
data = json.loads(url.read().decode())
for j in data["jobs"]:
if(j["status"] == "completed"):
countCompleted += 1
print("Completed jobs:" + str(countCompleted) + ". Jobs overall: " + str(numOfJobs))
if(countCompleted >= numOfJobs):
break
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/master-package.sh")
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/cleanup-anaconda.sh")
|
Return an error when number of jobs exceeds max_jobs
|
Return an error when number of jobs exceeds max_jobs
ghactions API call we're using limits number of jobs
returned in one call. If jobs exceed this number they are
grouped into "pages". Page handling code shall be added
when we exceed this number.
|
Python
|
apache-2.0
|
litex-hub/litex-conda-ci,litex-hub/litex-conda-ci
|
python
|
## Code Before:
import urllib.request
import json
import subprocess
import time
import os
# We're limited to this number by GH Actions API
# https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#list-jobs-for-a-workflow-run
max_jobs=100
status_url = "https://api.github.com/repos/" \
+ os.environ['GITHUB_REPOSITORY'] \
+ "/actions/runs/" \
+ os.environ['GITHUB_RUN_ID'] \
+ "/jobs" \
+ "?per_page=" + str(max_jobs)
numOfJobs = int(os.environ['NUM_OF_JOBS'])
while(True):
time.sleep(60)
countCompleted = 0
with urllib.request.urlopen(status_url) as url:
data = json.loads(url.read().decode())
for j in data["jobs"]:
if(j["status"] == "completed"):
countCompleted += 1
print("Completed jobs:" + str(countCompleted) + ". Jobs overall: " + str(numOfJobs))
if(countCompleted >= numOfJobs):
break
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/master-package.sh")
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/cleanup-anaconda.sh")
## Instruction:
Return an error when number of jobs exceeds max_jobs
ghactions API call we're using limits number of jobs
returned in one call. If jobs exceed this number they are
grouped into "pages". Page handling code shall be added
when we exceed this number.
## Code After:
import urllib.request
import json
import subprocess
import time
import os
import sys
# We're limited to this number by GH Actions API
# https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#list-jobs-for-a-workflow-run
max_jobs=100
status_url = "https://api.github.com/repos/" \
+ os.environ['GITHUB_REPOSITORY'] \
+ "/actions/runs/" \
+ os.environ['GITHUB_RUN_ID'] \
+ "/jobs" \
+ "?per_page=" + str(max_jobs)
numOfJobs = int(os.environ['NUM_OF_JOBS'])
if(numOfJobs > max_jobs):
sys.exit("ERROR: number of jobs exceeded max_jobs: " + str(max_jobs))
while(True):
time.sleep(60)
countCompleted = 0
with urllib.request.urlopen(status_url) as url:
data = json.loads(url.read().decode())
for j in data["jobs"]:
if(j["status"] == "completed"):
countCompleted += 1
print("Completed jobs:" + str(countCompleted) + ". Jobs overall: " + str(numOfJobs))
if(countCompleted >= numOfJobs):
break
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/master-package.sh")
subprocess.call(os.environ['GITHUB_WORKSPACE'] + "/.github/scripts/cleanup-anaconda.sh")
|
// ... existing code ...
import subprocess
import time
import os
import sys
# We're limited to this number by GH Actions API
# https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#list-jobs-for-a-workflow-run
// ... modified code ...
+ "?per_page=" + str(max_jobs)
numOfJobs = int(os.environ['NUM_OF_JOBS'])
if(numOfJobs > max_jobs):
sys.exit("ERROR: number of jobs exceeded max_jobs: " + str(max_jobs))
while(True):
time.sleep(60)
// ... rest of the code ...
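The commit message defers page handling; for reference, the jobs endpoint also accepts a page query parameter, so a later revision could walk pages roughly as sketched below (error handling omitted, count_completed is a hypothetical helper).
def count_completed(base_url):
    # base_url already carries ?per_page=..., so &page=N can be appended.
    completed, page = 0, 1
    while True:
        with urllib.request.urlopen(base_url + "&page=" + str(page)) as url:
            jobs = json.loads(url.read().decode())["jobs"]
        if not jobs:
            break
        completed += sum(1 for j in jobs if j["status"] == "completed")
        page += 1
    return completed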
|
6b1ebf85ec2b76bee889936726c3caac45203675
|
pubsubpull/tests/test_config.py
|
pubsubpull/tests/test_config.py
|
from slumber.connector.ua import get
from django.test import TestCase
from pubsubpull.models import *
class TestConfiguration(TestCase):
def test_slumber(self):
response, json = get('/slumber/')
self.assertEquals(response.status_code, 200, response)
|
from slumber.connector.ua import get
from django.test import TestCase
from pubsubpull.models import *
class TestConfiguration(TestCase):
def test_slumber(self):
response, json = get('/slumber/')
self.assertEquals(response.status_code, 200, response)
self.assertTrue(json.has_key('apps'), json)
self.assertTrue(json['apps'].has_key('pubsubpull'), json)
|
Make sure pubsubpull is installed.
|
Make sure pubsubpull is installed.
|
Python
|
mit
|
KayEss/django-pubsubpull,KayEss/django-pubsubpull,KayEss/django-pubsubpull
|
python
|
## Code Before:
from slumber.connector.ua import get
from django.test import TestCase
from pubsubpull.models import *
class TestConfiguration(TestCase):
def test_slumber(self):
response, json = get('/slumber/')
self.assertEquals(response.status_code, 200, response)
## Instruction:
Make sure pubsubpull is installed.
## Code After:
from slumber.connector.ua import get
from django.test import TestCase
from pubsubpull.models import *
class TestConfiguration(TestCase):
def test_slumber(self):
response, json = get('/slumber/')
self.assertEquals(response.status_code, 200, response)
self.assertTrue(json.has_key('apps'), json)
self.assertTrue(json['apps'].has_key('pubsubpull'), json)
|
...
def test_slumber(self):
response, json = get('/slumber/')
self.assertEquals(response.status_code, 200, response)
self.assertTrue(json.has_key('apps'), json)
self.assertTrue(json['apps'].has_key('pubsubpull'), json)
...
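dict.has_key exists only on Python 2; under Python 3 the same assertions would use the in operator, for example:
self.assertIn('apps', json)
self.assertIn('pubsubpull', json['apps'])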
|
f7a201f61382593baa6e8ebadfedea68563f1fef
|
examples/repeat.py
|
examples/repeat.py
|
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
Allow to specify role name on commandline
|
examples: Allow to specify role name on commandline
|
Python
|
mit
|
msgflo/msgflo-python
|
python
|
## Code Before:
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
repeater = Repeat('repeat')
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
## Instruction:
examples: Allow to specify role name on commandline
## Code After:
import sys, os, json, logging
sys.path.append(os.path.abspath("."))
import gevent
import msgflo
class Repeat(msgflo.Participant):
def __init__(self, role):
d = {
'component': 'PythonRepeat',
'label': 'Repeat input data without change',
}
msgflo.Participant.__init__(self, d, role)
def process(self, inport, msg):
self.send('out', msg.data)
self.ack(msg)
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
sys.stdout.flush()
waiter.wait()
print "Shutdown"
sys.stdout.flush()
if __name__ == '__main__':
logging.basicConfig()
main()
|
# ... existing code ...
def main():
waiter = gevent.event.AsyncResult()
role = sys.argv[1] if len(sys.argv) > 1 else 'repeat'
repeater = Repeat(role)
engine = msgflo.run(repeater, done_cb=waiter.set)
print "Repeat running on %s" % (engine.broker_url)
# ... rest of the code ...
|
9a52024ff5b8175ee8b8d4665d3c8c667003019b
|
glitter/blocks/redactor/tests.py
|
glitter/blocks/redactor/tests.py
|
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
|
from __future__ import unicode_literals
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from glitter.models import Version, ContentBlock
from glitter.pages.models import Page
from .models import Redactor
class RedactorTestCase(TestCase):
def setUp(self):
User = get_user_model()
page = Page.objects.create(url='/redactor/', title='Test page')
self.page_content_type = ContentType.objects.get_for_model(Page)
self.editor = User.objects.create_user(username='redactor', password='redactor')
page_version = Version.objects.create(
content_type=self.page_content_type, object_id=page.id,
template_name='glitter/sample.html', owner=self.editor
)
self.redactor_block = Redactor.objects.create(
content='Test'
)
self.content_block = ContentBlock.objects.create(
obj_version=page_version,
column='content',
position=1,
content_type=ContentType.objects.get_for_model(self.redactor_block),
object_id=self.redactor_block.id
)
self.redactor_block.content_block = self.content_block
self.redactor_block.save()
def test_existance(self):
redactor = Redactor.objects.get(id=self.redactor_block.id)
self.assertEqual(redactor.id, self.redactor_block.id)
|
Add test for redactor block creation
|
Add test for redactor block creation
|
Python
|
bsd-3-clause
|
developersociety/django-glitter,blancltd/django-glitter,developersociety/django-glitter,developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter
|
python
|
## Code Before:
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
## Instruction:
Add test for redactor block creation
## Code After:
from __future__ import unicode_literals
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from glitter.models import Version, ContentBlock
from glitter.pages.models import Page
from .models import Redactor
class RedactorTestCase(TestCase):
def setUp(self):
User = get_user_model()
page = Page.objects.create(url='/redactor/', title='Test page')
self.page_content_type = ContentType.objects.get_for_model(Page)
self.editor = User.objects.create_user(username='redactor', password='redactor')
page_version = Version.objects.create(
content_type=self.page_content_type, object_id=page.id,
template_name='glitter/sample.html', owner=self.editor
)
self.redactor_block = Redactor.objects.create(
content='Test'
)
self.content_block = ContentBlock.objects.create(
obj_version=page_version,
column='content',
position=1,
content_type=ContentType.objects.get_for_model(self.redactor_block),
object_id=self.redactor_block.id
)
self.redactor_block.content_block = self.content_block
self.redactor_block.save()
def test_existance(self):
redactor = Redactor.objects.get(id=self.redactor_block.id)
self.assertEqual(redactor.id, self.redactor_block.id)
|
...
from __future__ import unicode_literals
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from glitter.models import Version, ContentBlock
from glitter.pages.models import Page
from .models import Redactor
class RedactorTestCase(TestCase):
def setUp(self):
User = get_user_model()
page = Page.objects.create(url='/redactor/', title='Test page')
self.page_content_type = ContentType.objects.get_for_model(Page)
self.editor = User.objects.create_user(username='redactor', password='redactor')
page_version = Version.objects.create(
content_type=self.page_content_type, object_id=page.id,
template_name='glitter/sample.html', owner=self.editor
)
self.redactor_block = Redactor.objects.create(
content='Test'
)
self.content_block = ContentBlock.objects.create(
obj_version=page_version,
column='content',
position=1,
content_type=ContentType.objects.get_for_model(self.redactor_block),
object_id=self.redactor_block.id
)
self.redactor_block.content_block = self.content_block
self.redactor_block.save()
def test_existance(self):
redactor = Redactor.objects.get(id=self.redactor_block.id)
self.assertEqual(redactor.id, self.redactor_block.id)
...
|
f30a560db83d8a7ac87685c69f5b519faaa929fa
|
project_issue_department/__openerp__.py
|
project_issue_department/__openerp__.py
|
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
|
Fix pep8 to pass super checks
|
Fix pep8 to pass super checks
|
Python
|
agpl-3.0
|
OCA/department,Antiun/department,acsone/department,kmee/department,Endika/department
|
python
|
## Code Before:
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
## Instruction:
Fix pep8 to pass super checks
## Code After:
{
'name': 'Project Issue with Department',
'version': '1.1',
"category": "Project Management",
'description': """\
Add Department field to Project Issues.
Selecting a Project for an issue will automatically populate this with the
Project's defined Department.
""",
'author': 'Daniel Reis',
'website': '[email protected]',
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False,
'auto_install': True,
}
|
// ... existing code ...
'depends': [
'project_issue',
'project_department',
],
'update_xml': [
'project_issue_view.xml',
'security/ir.model.access.csv',
// ... modified code ...
'application': False,
'auto_install': True,
}
// ... rest of the code ...
|
685cc22bc92f5b35c9ec6fcc7fe8e65bda3ecf1e
|
src/math/p_asin.c
|
src/math/p_asin.c
|
/**
*
 * Calculates the inverse sine (arc sine) of the argument 'a'. Arguments must be
* in the range -1 to 1. The function does not check for illegal input values.
* Results are in the range -pi/2 to pi/2.
*
* @param a Pointer to input vector
*
* @param c Pointer to output vector
*
* @param n Size of 'a' and 'c' vector.
*
* @return None
*
*/
#include <math.h>
void p_asin_f32(const float *a, float *c, int n)
{
int i;
for (i = 0; i < n; i++) {
*(c + i) = asinf(*(a + i));
}
}
|
static const float pi_2 = (float) M_PI / 2.f;
/*
* 0 <= x <= 1
* asin x = pi/2 - (1 - x)^(1/2) * (a0 + a1 * x + ... + a3 * x^3) + e(x)
* |e(x)| <= 5 * 10^-5
*/
static inline float __p_asin_pos(const float x)
{
const float a0 = 1.5707288f;
const float a1 = -0.2121144f;
const float a2 = 0.0742610f;
const float a3 = -0.0187293f;
float a_ = 1.f - x;
float a;
p_sqrt_f32(&a_, &a, 1);
return pi_2 - a * (a0 + a1 * x + a2 * x * x + a3 * x * x * x);
}
/*
* -1 <= x <= 1
* asin(-x) = - asin x
*/
static inline float _p_asin(const float x)
{
if (x >= 0.f)
return __p_asin_pos(x);
else
return -1.f * __p_asin_pos(-x);
}
/**
*
 * Calculates the inverse sine (arc sine) of the argument 'a'. Arguments must be
* in the range -1 to 1. The function does not check for illegal input values.
* Results are in the range -pi/2 to pi/2.
*
* @param a Pointer to input vector
*
* @param c Pointer to output vector
*
* @param n Size of 'a' and 'c' vector.
*
* @return None
*
*/
void p_asin_f32(const float *a, float *c, int n)
{
int i;
for (i = 0; i < n; i++) {
c[i] = _p_asin(a[i]);
}
}
|
Implement the inverse sine function.
|
math:asin: Implement the inverse sine function.
Signed-off-by: Mansour Moufid <[email protected]>
|
C
|
apache-2.0
|
debug-de-su-ka/pal,8l/pal,eliteraspberries/pal,Adamszk/pal3,8l/pal,Adamszk/pal3,Adamszk/pal3,aolofsson/pal,debug-de-su-ka/pal,mateunho/pal,parallella/pal,parallella/pal,8l/pal,aolofsson/pal,debug-de-su-ka/pal,olajep/pal,mateunho/pal,8l/pal,eliteraspberries/pal,olajep/pal,parallella/pal,parallella/pal,debug-de-su-ka/pal,mateunho/pal,parallella/pal,debug-de-su-ka/pal,Adamszk/pal3,eliteraspberries/pal,mateunho/pal,aolofsson/pal,eliteraspberries/pal,olajep/pal,olajep/pal,eliteraspberries/pal,aolofsson/pal,mateunho/pal
|
c
|
## Code Before:
/**
*
 * Calculates the inverse sine (arc sine) of the argument 'a'. Arguments must be
* in the range -1 to 1. The function does not check for illegal input values.
* Results are in the range -pi/2 to pi/2.
*
* @param a Pointer to input vector
*
* @param c Pointer to output vector
*
* @param n Size of 'a' and 'c' vector.
*
* @return None
*
*/
#include <math.h>
void p_asin_f32(const float *a, float *c, int n)
{
int i;
for (i = 0; i < n; i++) {
*(c + i) = asinf(*(a + i));
}
}
## Instruction:
math:asin: Implement the inverse sine function.
Signed-off-by: Mansour Moufid <[email protected]>
## Code After:
static const float pi_2 = (float) M_PI / 2.f;
/*
* 0 <= x <= 1
* asin x = pi/2 - (1 - x)^(1/2) * (a0 + a1 * x + ... + a3 * x^3) + e(x)
* |e(x)| <= 5 * 10^-5
*/
static inline float __p_asin_pos(const float x)
{
const float a0 = 1.5707288f;
const float a1 = -0.2121144f;
const float a2 = 0.0742610f;
const float a3 = -0.0187293f;
float a_ = 1.f - x;
float a;
p_sqrt_f32(&a_, &a, 1);
return pi_2 - a * (a0 + a1 * x + a2 * x * x + a3 * x * x * x);
}
/*
* -1 <= x <= 1
* asin(-x) = - asin x
*/
static inline float _p_asin(const float x)
{
if (x >= 0.f)
return __p_asin_pos(x);
else
return -1.f * __p_asin_pos(-x);
}
/**
*
 * Calculates the inverse sine (arc sine) of the argument 'a'. Arguments must be
* in the range -1 to 1. The function does not check for illegal input values.
* Results are in the range -pi/2 to pi/2.
*
* @param a Pointer to input vector
*
* @param c Pointer to output vector
*
* @param n Size of 'a' and 'c' vector.
*
* @return None
*
*/
void p_asin_f32(const float *a, float *c, int n)
{
int i;
for (i = 0; i < n; i++) {
c[i] = _p_asin(a[i]);
}
}
|
# ... existing code ...
static const float pi_2 = (float) M_PI / 2.f;
/*
* 0 <= x <= 1
* asin x = pi/2 - (1 - x)^(1/2) * (a0 + a1 * x + ... + a3 * x^3) + e(x)
* |e(x)| <= 5 * 10^-5
*/
static inline float __p_asin_pos(const float x)
{
const float a0 = 1.5707288f;
const float a1 = -0.2121144f;
const float a2 = 0.0742610f;
const float a3 = -0.0187293f;
float a_ = 1.f - x;
float a;
p_sqrt_f32(&a_, &a, 1);
return pi_2 - a * (a0 + a1 * x + a2 * x * x + a3 * x * x * x);
}
/*
* -1 <= x <= 1
* asin(-x) = - asin x
*/
static inline float _p_asin(const float x)
{
if (x >= 0.f)
return __p_asin_pos(x);
else
return -1.f * __p_asin_pos(-x);
}
/**
*
# ... modified code ...
* @return None
*
*/
void p_asin_f32(const float *a, float *c, int n)
{
int i;
for (i = 0; i < n; i++) {
c[i] = _p_asin(a[i]);
}
}
# ... rest of the code ...
|
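The record above encodes the four-term polynomial asin x ~= pi/2 - sqrt(1 - x) * (a0 + a1*x + a2*x^2 + a3*x^3) with the documented |e(x)| <= 5e-5 bound. A minimal standalone Python sketch, not taken from the record and using an arbitrary test grid, that sanity-checks those coefficients against math.asin:

import math

# Coefficients copied from the C code above.
A0, A1, A2, A3 = 1.5707288, -0.2121144, 0.0742610, -0.0187293

def asin_approx(x):
    # Mirrors _p_asin: reduce to [0, 1], apply the polynomial, restore the sign.
    sign, x = (1.0, x) if x >= 0.0 else (-1.0, -x)
    poly = A0 + A1 * x + A2 * x * x + A3 * x * x * x
    return sign * (math.pi / 2.0 - math.sqrt(1.0 - x) * poly)

# Worst error over a coarse grid; it should come out on the order of the stated 5e-5 bound.
xs = [i / 1000.0 for i in range(-1000, 1001)]
print(max(abs(asin_approx(x) - math.asin(x)) for x in xs))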
a459c9bd1135ede49e9b2f55a633f86d7cdb81e2
|
tests/mocks/RPi.py
|
tests/mocks/RPi.py
|
class GPIO(object):
BOARD = 'board'
IN = 'in'
OUT = 'out'
PUD_UP = 'pud_up'
FALLING = 'falling'
HIGH = 'high'
LOW = 'low'
@classmethod
def setmode(cls, mode):
print("Mock: set GPIO mode {}".format(mode))
@classmethod
def setup(cls, pin, direction, **kwargs):
print("Mock: setup GPIO pin {} to {}".format(pin, direction))
@classmethod
def output(cls, pin, status):
print("Mock: output GPIO pin {} to {}".format(pin, status))
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
print("Mock: detect GPIO pin {} when {}".format(pin, status))
@classmethod
def cleanup(cls):
print("Mock: quit GPIO")
|
class GPIO(object):
BOARD = 'board'
IN = 'in'
OUT = 'out'
PUD_UP = 'pud_up'
FALLING = 'falling'
HIGH = 'high'
LOW = 'low'
@classmethod
def setmode(cls, mode):
print("Mock: set GPIO mode {}".format(mode))
@classmethod
def setup(cls, pin, direction, **kwargs):
print("Mock: setup GPIO pin {} to {}".format(pin, direction))
@classmethod
def output(cls, pin, status):
pass
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
print("Mock: detect GPIO pin {} when {}".format(pin, status))
@classmethod
def cleanup(cls):
print("Mock: quit GPIO")
|
Remove print message in mocks
|
Remove print message in mocks
|
Python
|
mit
|
werdeil/pibooth,werdeil/pibooth
|
python
|
## Code Before:
class GPIO(object):
BOARD = 'board'
IN = 'in'
OUT = 'out'
PUD_UP = 'pud_up'
FALLING = 'falling'
HIGH = 'high'
LOW = 'low'
@classmethod
def setmode(cls, mode):
print("Mock: set GPIO mode {}".format(mode))
@classmethod
def setup(cls, pin, direction, **kwargs):
print("Mock: setup GPIO pin {} to {}".format(pin, direction))
@classmethod
def output(cls, pin, status):
print("Mock: output GPIO pin {} to {}".format(pin, status))
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
print("Mock: detect GPIO pin {} when {}".format(pin, status))
@classmethod
def cleanup(cls):
print("Mock: quit GPIO")
## Instruction:
Remove print message in mocks
## Code After:
class GPIO(object):
BOARD = 'board'
IN = 'in'
OUT = 'out'
PUD_UP = 'pud_up'
FALLING = 'falling'
HIGH = 'high'
LOW = 'low'
@classmethod
def setmode(cls, mode):
print("Mock: set GPIO mode {}".format(mode))
@classmethod
def setup(cls, pin, direction, **kwargs):
print("Mock: setup GPIO pin {} to {}".format(pin, direction))
@classmethod
def output(cls, pin, status):
pass
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
print("Mock: detect GPIO pin {} when {}".format(pin, status))
@classmethod
def cleanup(cls):
print("Mock: quit GPIO")
|
...
@classmethod
def output(cls, pin, status):
pass
@classmethod
def add_event_detect(cls, pin, status, **kwargs):
...
|
69df0f5148b998cc7757405b9965200276ce55b9
|
fireplace/cards/league/adventure.py
|
fireplace/cards/league/adventure.py
|
from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
|
from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
|
Implement Temple Escape event choices
|
Implement Temple Escape event choices
|
Python
|
agpl-3.0
|
beheh/fireplace,NightKev/fireplace,jleclanche/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,Ragowit/fireplace,Ragowit/fireplace
|
python
|
## Code Before:
from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
## Instruction:
Implement Temple Escape event choices
## Code After:
from ..utils import *
##
# Spells
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
|
...
# Medivh's Locket
class LOEA16_12:
play = Morph(FRIENDLY_HAND, "GVG_003")
##
# Temple Escape events
# Pit of Spikes
class LOEA04_06:
choose = ("LOEA04_06a", "LOEA04_06b")
# Swing Across
class LOEA04_06a:
play = COINFLIP & Hit(FRIENDLY_HERO, 10)
# Walk Across Gingerly
class LOEA04_06b:
play = Hit(FRIENDLY_HERO, 5)
# A Glowing Pool
class LOEA04_28:
choose = ("LOEA04_28a", "LOEA04_28b")
# Drink Deeply
class LOEA04_28a:
play = Draw(CONTROLLER)
# Wade Through
class LOEA04_28b:
play = GainMana(CONTROLLER, 1)
# The Eye
class LOEA04_29:
choose = ("LOEA04_29a", "LOEA04_29b")
# Touch It
class LOEA04_29a:
play = Heal(FRIENDLY_HERO, 10)
# Investigate the Runes
class LOEA04_29b:
play = Draw(CONTROLLER) * 2
# The Darkness
class LOEA04_30:
choose = ("LOEA04_30a", "LOEA04_31b")
# Take the Shortcut
class LOEA04_30a:
play = Summon(OPPONENT, "CS2_186")
# Do Nothing
class LOEA04_31b:
pass
...
|
a65dd8dc3df3569c3e5fddcc5413acf7db0ef29d
|
src/test/java/specs/VariableSpecs.java
|
src/test/java/specs/VariableSpecs.java
|
package specs;
import static com.greghaskins.spectrum.Spectrum.beforeEach;
import static com.greghaskins.spectrum.Spectrum.describe;
import static com.greghaskins.spectrum.Spectrum.it;
import static junit.framework.TestCase.assertNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import com.greghaskins.spectrum.Spectrum;
import com.greghaskins.spectrum.Variable;
import org.junit.runner.RunWith;
@RunWith(Spectrum.class)
public class VariableSpecs {
{
describe("The Variable convenience wrapper", () -> {
final Variable<Integer> counter = new Variable<>();
beforeEach(() -> {
counter.set(0);
});
beforeEach(() -> {
counter.set(counter.get() + 1);
});
it("lets you work around Java's requirement that closures only use `final` variables", () -> {
counter.set(counter.get() + 1);
assertThat(counter.get(), is(2));
});
it("can optionally have an initial value set", () -> {
final Variable<String> name = new Variable<>("Alice");
assertThat(name.get(), is("Alice"));
});
it("has a null value if not specified", () -> {
final Variable<String> name = new Variable<>();
assertNull(name.get());
});
});
}
}
|
package specs;
import static com.greghaskins.spectrum.Spectrum.beforeAll;
import static com.greghaskins.spectrum.Spectrum.beforeEach;
import static com.greghaskins.spectrum.Spectrum.describe;
import static com.greghaskins.spectrum.Spectrum.it;
import static junit.framework.TestCase.assertNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import com.greghaskins.spectrum.Spectrum;
import com.greghaskins.spectrum.Variable;
import org.junit.runner.RunWith;
@RunWith(Spectrum.class)
public class VariableSpecs {
{
describe("The Variable convenience wrapper", () -> {
final Variable<Integer> counter = new Variable<>();
beforeAll(() -> {
counter.set(0);
});
beforeEach(() -> {
final int previousValue = counter.get();
counter.set(previousValue + 1);
});
it("lets you work around Java's requirement that closures only use `final` variables", () -> {
assertThat(counter.get(), is(1));
});
it("can share values across scopes, so use it carefully", () -> {
assertThat(counter.get(), is(2));
});
it("can optionally have an initial value set", () -> {
final Variable<String> name = new Variable<>("Alice");
assertThat(name.get(), is("Alice"));
});
it("has a null value if not specified", () -> {
final Variable<String> name = new Variable<>();
assertNull(name.get());
});
});
}
}
|
Clarify specs for Variable helper
|
Clarify specs for Variable helper
|
Java
|
mit
|
greghaskins/spectrum
|
java
|
## Code Before:
package specs;
import static com.greghaskins.spectrum.Spectrum.beforeEach;
import static com.greghaskins.spectrum.Spectrum.describe;
import static com.greghaskins.spectrum.Spectrum.it;
import static junit.framework.TestCase.assertNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import com.greghaskins.spectrum.Spectrum;
import com.greghaskins.spectrum.Variable;
import org.junit.runner.RunWith;
@RunWith(Spectrum.class)
public class VariableSpecs {
{
describe("The Variable convenience wrapper", () -> {
final Variable<Integer> counter = new Variable<>();
beforeEach(() -> {
counter.set(0);
});
beforeEach(() -> {
counter.set(counter.get() + 1);
});
it("lets you work around Java's requirement that closures only use `final` variables", () -> {
counter.set(counter.get() + 1);
assertThat(counter.get(), is(2));
});
it("can optionally have an initial value set", () -> {
final Variable<String> name = new Variable<>("Alice");
assertThat(name.get(), is("Alice"));
});
it("has a null value if not specified", () -> {
final Variable<String> name = new Variable<>();
assertNull(name.get());
});
});
}
}
## Instruction:
Clarify specs for Variable helper
## Code After:
package specs;
import static com.greghaskins.spectrum.Spectrum.beforeAll;
import static com.greghaskins.spectrum.Spectrum.beforeEach;
import static com.greghaskins.spectrum.Spectrum.describe;
import static com.greghaskins.spectrum.Spectrum.it;
import static junit.framework.TestCase.assertNull;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import com.greghaskins.spectrum.Spectrum;
import com.greghaskins.spectrum.Variable;
import org.junit.runner.RunWith;
@RunWith(Spectrum.class)
public class VariableSpecs {
{
describe("The Variable convenience wrapper", () -> {
final Variable<Integer> counter = new Variable<>();
beforeAll(() -> {
counter.set(0);
});
beforeEach(() -> {
final int previousValue = counter.get();
counter.set(previousValue + 1);
});
it("lets you work around Java's requirement that closures only use `final` variables", () -> {
assertThat(counter.get(), is(1));
});
it("can share values across scopes, so use it carefully", () -> {
assertThat(counter.get(), is(2));
});
it("can optionally have an initial value set", () -> {
final Variable<String> name = new Variable<>("Alice");
assertThat(name.get(), is("Alice"));
});
it("has a null value if not specified", () -> {
final Variable<String> name = new Variable<>();
assertNull(name.get());
});
});
}
}
|
# ... existing code ...
package specs;
import static com.greghaskins.spectrum.Spectrum.beforeAll;
import static com.greghaskins.spectrum.Spectrum.beforeEach;
import static com.greghaskins.spectrum.Spectrum.describe;
import static com.greghaskins.spectrum.Spectrum.it;
# ... modified code ...
final Variable<Integer> counter = new Variable<>();
beforeAll(() -> {
counter.set(0);
});
beforeEach(() -> {
final int previousValue = counter.get();
counter.set(previousValue + 1);
});
it("lets you work around Java's requirement that closures only use `final` variables", () -> {
assertThat(counter.get(), is(1));
});
it("can share values across scopes, so use it carefully", () -> {
assertThat(counter.get(), is(2));
});
# ... rest of the code ...
|
18442c5cf486e16f4cb418a0d7ef2a2dd9ea7c34
|
Fastor/tensor/ScalarIndexing.h
|
Fastor/tensor/ScalarIndexing.h
|
// Scalar indexing non-const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
FASTOR_INLINE T& operator()(Args ... args) {
return _data[get_flat_index(args...)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_NONCONST_H
#ifndef SCALAR_INDEXING_CONST_H
#define SCALAR_INDEXING_CONST_H
// Scalar indexing const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator()(Args ... args) const {
return _data[get_flat_index(args...)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_CONST_H
|
// Scalar indexing non-const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
FASTOR_INLINE T& operator()(Args ... args) {
return _data[get_flat_index(args...)];
}
template<typename Arg, typename std::enable_if<1==dimension_t::value &&
is_arithmetic_pack<Arg>::value,bool>::type =0>
FASTOR_INLINE T& operator[](Arg arg) {
return _data[get_flat_index(arg)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_NONCONST_H
#ifndef SCALAR_INDEXING_CONST_H
#define SCALAR_INDEXING_CONST_H
// Scalar indexing const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator()(Args ... args) const {
return _data[get_flat_index(args...)];
}
template<typename Arg, typename std::enable_if<1==dimension_t::value &&
is_arithmetic_pack<Arg>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator[](Arg arg) const {
return _data[get_flat_index(arg)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_CONST_H
|
Allow indexing by operator[] when dimension is 1
|
Allow indexing by operator[] when dimension is 1
|
C
|
mit
|
romeric/Fastor,romeric/Fastor,romeric/Fastor
|
c
|
## Code Before:
// Scalar indexing non-const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
FASTOR_INLINE T& operator()(Args ... args) {
return _data[get_flat_index(args...)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_NONCONST_H
#ifndef SCALAR_INDEXING_CONST_H
#define SCALAR_INDEXING_CONST_H
// Scalar indexing const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator()(Args ... args) const {
return _data[get_flat_index(args...)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_CONST_H
## Instruction:
Allow indexing by operator[] when dimension is 1
## Code After:
// Scalar indexing non-const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
FASTOR_INLINE T& operator()(Args ... args) {
return _data[get_flat_index(args...)];
}
template<typename Arg, typename std::enable_if<1==dimension_t::value &&
is_arithmetic_pack<Arg>::value,bool>::type =0>
FASTOR_INLINE T& operator[](Arg arg) {
return _data[get_flat_index(arg)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_NONCONST_H
#ifndef SCALAR_INDEXING_CONST_H
#define SCALAR_INDEXING_CONST_H
// Scalar indexing const
//----------------------------------------------------------------------------------------------------------//
template<typename... Args, typename std::enable_if<sizeof...(Args)==dimension_t::value &&
is_arithmetic_pack<Args...>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator()(Args ... args) const {
return _data[get_flat_index(args...)];
}
template<typename Arg, typename std::enable_if<1==dimension_t::value &&
is_arithmetic_pack<Arg>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator[](Arg arg) const {
return _data[get_flat_index(arg)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_CONST_H
|
// ... existing code ...
is_arithmetic_pack<Args...>::value,bool>::type =0>
FASTOR_INLINE T& operator()(Args ... args) {
return _data[get_flat_index(args...)];
}
template<typename Arg, typename std::enable_if<1==dimension_t::value &&
is_arithmetic_pack<Arg>::value,bool>::type =0>
FASTOR_INLINE T& operator[](Arg arg) {
return _data[get_flat_index(arg)];
}
//----------------------------------------------------------------------------------------------------------//
// ... modified code ...
constexpr FASTOR_INLINE const T& operator()(Args ... args) const {
return _data[get_flat_index(args...)];
}
template<typename Arg, typename std::enable_if<1==dimension_t::value &&
is_arithmetic_pack<Arg>::value,bool>::type =0>
constexpr FASTOR_INLINE const T& operator[](Arg arg) const {
return _data[get_flat_index(arg)];
}
//----------------------------------------------------------------------------------------------------------//
#endif // SCALAR_INDEXING_CONST_H
// ... rest of the code ...
|
7902711589cf3ce0fe492442dbfec2497fd06dc4
|
build-aux/dist-docs.py
|
build-aux/dist-docs.py
|
import os
import shutil
import subprocess
from pathlib import PurePath
references = [
'docs/json-glib/json-glib-1.0',
]
sourceroot = os.environ.get('MESON_SOURCE_ROOT')
buildroot = os.environ.get('MESON_BUILD_ROOT')
distroot = os.environ.get('MESON_DIST_ROOT')
for reference in references:
src_path = os.path.join(buildroot, reference)
if os.path.isdir(src_path):
dst_path = os.path.join(distroot, reference)
shutil.copytree(src_path, dst_path)
|
import os
import shutil
references = [
'doc/json-glib-1.0',
]
sourceroot = os.environ.get('MESON_SOURCE_ROOT')
buildroot = os.environ.get('MESON_BUILD_ROOT')
distroot = os.environ.get('MESON_DIST_ROOT')
for reference in references:
src_path = os.path.join(buildroot, reference)
if os.path.isdir(src_path):
dst_path = os.path.join(distroot, reference)
shutil.copytree(src_path, dst_path)
|
Fix the docs path in the dist script
|
build: Fix the docs path in the dist script
Fixes: #66
|
Python
|
lgpl-2.1
|
frida/json-glib,GNOME/json-glib,GNOME/json-glib,frida/json-glib
|
python
|
## Code Before:
import os
import shutil
import subprocess
from pathlib import PurePath
references = [
'docs/json-glib/json-glib-1.0',
]
sourceroot = os.environ.get('MESON_SOURCE_ROOT')
buildroot = os.environ.get('MESON_BUILD_ROOT')
distroot = os.environ.get('MESON_DIST_ROOT')
for reference in references:
src_path = os.path.join(buildroot, reference)
if os.path.isdir(src_path):
dst_path = os.path.join(distroot, reference)
shutil.copytree(src_path, dst_path)
## Instruction:
build: Fix the docs path in the dist script
Fixes: #66
## Code After:
import os
import shutil
references = [
'doc/json-glib-1.0',
]
sourceroot = os.environ.get('MESON_SOURCE_ROOT')
buildroot = os.environ.get('MESON_BUILD_ROOT')
distroot = os.environ.get('MESON_DIST_ROOT')
for reference in references:
src_path = os.path.join(buildroot, reference)
if os.path.isdir(src_path):
dst_path = os.path.join(distroot, reference)
shutil.copytree(src_path, dst_path)
|
...
import os
import shutil
references = [
'doc/json-glib-1.0',
]
sourceroot = os.environ.get('MESON_SOURCE_ROOT')
...
|
b33b063e49b394265bc890f6d3b39da08e355416
|
blogs/tests/test_parser.py
|
blogs/tests/test_parser.py
|
from unittest import TestCase
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(TestCase):
def setUp(self):
self.test_file_path = get_test_rss_path()
self.entries = get_all_entries("file://{}".format(self.test_file_path))
def test_entries(self):
""" Make sure we can parse RSS entries """
self.assertEqual(len(self.entries), 25)
|
import datetime
import unittest
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_file_path = get_test_rss_path()
cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
def test_entries(self):
self.assertEqual(len(self.entries), 25)
self.assertEqual(
self.entries[0]['title'],
'Introducing Electronic Contributor Agreements'
)
self.assertIn(
"We're happy to announce the new way to file a contributor "
"agreement: on the web at",
self.entries[0]['summary']
)
self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
self.assertEqual(
self.entries[0]['url'],
'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
)
|
Add some tests to make sure we can parse RSS feeds
|
Add some tests to make sure we can parse RSS feeds
|
Python
|
apache-2.0
|
manhhomienbienthuy/pythondotorg,proevo/pythondotorg,manhhomienbienthuy/pythondotorg,proevo/pythondotorg,Mariatta/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg,python/pythondotorg,manhhomienbienthuy/pythondotorg,python/pythondotorg,Mariatta/pythondotorg,manhhomienbienthuy/pythondotorg,Mariatta/pythondotorg,python/pythondotorg,proevo/pythondotorg,python/pythondotorg
|
python
|
## Code Before:
from unittest import TestCase
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(TestCase):
def setUp(self):
self.test_file_path = get_test_rss_path()
self.entries = get_all_entries("file://{}".format(self.test_file_path))
def test_entries(self):
""" Make sure we can parse RSS entries """
self.assertEqual(len(self.entries), 25)
## Instruction:
Add some tests to make sure we can parse RSS feeds
## Code After:
import datetime
import unittest
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_file_path = get_test_rss_path()
cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
def test_entries(self):
self.assertEqual(len(self.entries), 25)
self.assertEqual(
self.entries[0]['title'],
'Introducing Electronic Contributor Agreements'
)
self.assertIn(
"We're happy to announce the new way to file a contributor "
"agreement: on the web at",
self.entries[0]['summary']
)
self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
self.assertEqual(
self.entries[0]['url'],
'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
)
|
# ... existing code ...
import datetime
import unittest
from ..parser import get_all_entries
from .utils import get_test_rss_path
class BlogParserTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_file_path = get_test_rss_path()
cls.entries = get_all_entries("file://{}".format(cls.test_file_path))
def test_entries(self):
self.assertEqual(len(self.entries), 25)
self.assertEqual(
self.entries[0]['title'],
'Introducing Electronic Contributor Agreements'
)
self.assertIn(
"We're happy to announce the new way to file a contributor "
"agreement: on the web at",
self.entries[0]['summary']
)
self.assertIsInstance(self.entries[0]['pub_date'], datetime.datetime)
self.assertEqual(
self.entries[0]['url'],
'http://feedproxy.google.com/~r/PythonInsider/~3/tGNCqyOiun4/introducing-electronic-contributor.html'
)
# ... rest of the code ...
|
343524ddeac29e59d7c214a62a721c2065583503
|
setuptools_extversion/__init__.py
|
setuptools_extversion/__init__.py
|
VERSION_PROVIDER_KEY = 'extversion'
def version_calc(dist, attr, value):
"""
Handler for parameter to setup(extversion=value)
"""
if attr == VERSION_PROVIDER_KEY:
extversion = value
dist.metadata.version = extversion(dist)
class command(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __call__(self, distribution, metadata, command):
return subprocess.check_output(*self.args, **self.kwargs).strip()
class function(object):
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, *args, **kwargs):
if isinstance(self.func, basestring):
ep = pkg_resources.EntryPoint.parse('x=' + self.func)
self.func = ep.load(False)
args = list(self.args + args)
kwargs = dict(self.kwargs)
kwargs.update(kwargs)
return self.func(*args, **kwargs)
|
import subprocess
VERSION_PROVIDER_KEY = 'extversion'
def version_calc(dist, attr, value):
"""
Handler for parameter to setup(extversion=value)
"""
if attr == VERSION_PROVIDER_KEY:
if callable(value):
extversion = value
elif hasattr(value, 'get'):
if value.get('command'):
extversion = command(value.get('command'), shell=True)
else:
raise Exception('Unknown type for %s = %r' % (attr, value))
dist.metadata.version = extversion(dist)
class command(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __call__(self, distribution):
return subprocess.check_output(*self.args, **self.kwargs).strip()
class function(object):
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, *args, **kwargs):
if isinstance(self.func, basestring):
ep = pkg_resources.EntryPoint.parse('x=' + self.func)
self.func = ep.load(False)
args = list(self.args + args)
kwargs = dict(self.kwargs)
kwargs.update(kwargs)
return self.func(*args, **kwargs)
|
Add support for providing command string
|
Add support for providing command string
User can provide a command string in a 'command' key -- e.g.:
setup(
...
setup_requires='setuptools_extversion',
extversion={
'command': 'git describe --tags --dirty',
}
...
)
|
Python
|
mit
|
msabramo/python_setuptools_extversion
|
python
|
## Code Before:
VERSION_PROVIDER_KEY = 'extversion'
def version_calc(dist, attr, value):
"""
Handler for parameter to setup(extversion=value)
"""
if attr == VERSION_PROVIDER_KEY:
extversion = value
dist.metadata.version = extversion(dist)
class command(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __call__(self, distribution, metadata, command):
return subprocess.check_output(*self.args, **self.kwargs).strip()
class function(object):
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, *args, **kwargs):
if isinstance(self.func, basestring):
ep = pkg_resources.EntryPoint.parse('x=' + self.func)
self.func = ep.load(False)
args = list(self.args + args)
kwargs = dict(self.kwargs)
kwargs.update(kwargs)
return self.func(*args, **kwargs)
## Instruction:
Add support for providing command string
User can provide a command string in a 'command' key -- e.g.:
setup(
...
setup_requires='setuptools_extversion',
extversion={
'command': 'git describe --tags --dirty',
}
...
)
## Code After:
import subprocess
VERSION_PROVIDER_KEY = 'extversion'
def version_calc(dist, attr, value):
"""
Handler for parameter to setup(extversion=value)
"""
if attr == VERSION_PROVIDER_KEY:
if callable(value):
extversion = value
elif hasattr(value, 'get'):
if value.get('command'):
extversion = command(value.get('command'), shell=True)
else:
raise Exception('Unknown type for %s = %r' % (attr, value))
dist.metadata.version = extversion(dist)
class command(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __call__(self, distribution):
return subprocess.check_output(*self.args, **self.kwargs).strip()
class function(object):
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, *args, **kwargs):
if isinstance(self.func, basestring):
ep = pkg_resources.EntryPoint.parse('x=' + self.func)
self.func = ep.load(False)
args = list(self.args + args)
kwargs = dict(self.kwargs)
kwargs.update(kwargs)
return self.func(*args, **kwargs)
|
...
import subprocess
VERSION_PROVIDER_KEY = 'extversion'
...
"""
if attr == VERSION_PROVIDER_KEY:
if callable(value):
extversion = value
elif hasattr(value, 'get'):
if value.get('command'):
extversion = command(value.get('command'), shell=True)
else:
raise Exception('Unknown type for %s = %r' % (attr, value))
dist.metadata.version = extversion(dist)
...
self.args = args
self.kwargs = kwargs
def __call__(self, distribution):
return subprocess.check_output(*self.args, **self.kwargs).strip()
...
|
b3e684d851be027874c933aaf5274ae3d557ad66
|
src/main/kotlin/siilinkari/vm/Frame.kt
|
src/main/kotlin/siilinkari/vm/Frame.kt
|
package siilinkari.vm
import siilinkari.objects.Value
/**
* Frame of local variables.
*/
class Frame(size: Int) {
private val bindings = Array<Value>(size) { Value.Bool.False }
/**
* Assigns a new value to existing variable.
*/
operator fun set(index: Int, value: Value) {
bindings[index] = value
}
/**
* Returns the value bound to given variable.
*/
operator fun get(index: Int): Value =
bindings[index]
}
|
package siilinkari.vm
import siilinkari.objects.Value
/**
* Frame of local variables.
*/
class Frame(size: Int) {
private val bindings = arrayOfNulls<Value?>(size)
/**
* Assigns a new value to existing variable.
*/
operator fun set(index: Int, value: Value) {
bindings[index] = value
}
/**
* Returns the value bound to given variable.
*/
operator fun get(index: Int): Value =
bindings[index]!!
}
|
Initialize frame bindings to array of nulls
|
Initialize frame bindings to array of nulls
|
Kotlin
|
mit
|
komu/siilinkari
|
kotlin
|
## Code Before:
package siilinkari.vm
import siilinkari.objects.Value
/**
* Frame of local variables.
*/
class Frame(size: Int) {
private val bindings = Array<Value>(size) { Value.Bool.False }
/**
* Assigns a new value to existing variable.
*/
operator fun set(index: Int, value: Value) {
bindings[index] = value
}
/**
* Returns the value bound to given variable.
*/
operator fun get(index: Int): Value =
bindings[index]
}
## Instruction:
Initialize frame bindings to array of nulls
## Code After:
package siilinkari.vm
import siilinkari.objects.Value
/**
* Frame of local variables.
*/
class Frame(size: Int) {
private val bindings = arrayOfNulls<Value?>(size)
/**
* Assigns a new value to existing variable.
*/
operator fun set(index: Int, value: Value) {
bindings[index] = value
}
/**
* Returns the value bound to given variable.
*/
operator fun get(index: Int): Value =
bindings[index]!!
}
|
# ... existing code ...
*/
class Frame(size: Int) {
private val bindings = arrayOfNulls<Value?>(size)
/**
* Assigns a new value to existing variable.
# ... modified code ...
* Returns the value bound to given variable.
*/
operator fun get(index: Int): Value =
bindings[index]!!
}
# ... rest of the code ...
|
4814ef9d78070c14ab4685b802543ba0afa26754
|
django/users/views.py
|
django/users/views.py
|
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super(UserViewSet, self).retrieve(request, pk)
|
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super().retrieve(request, pk)
|
Use Python 3 style for super
|
Use Python 3 style for super
|
Python
|
bsd-3-clause
|
FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja
|
python
|
## Code Before:
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super(UserViewSet, self).retrieve(request, pk)
## Instruction:
Use Python 3 style for super
## Code After:
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super().retrieve(request, pk)
|
...
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super().retrieve(request, pk)
...
|
465f73b14b9c9d416183c01cc5bb05c47d6e515f
|
src/org/robockets/stronghold/robot/commands/DrivePosition.java
|
src/org/robockets/stronghold/robot/commands/DrivePosition.java
|
package org.robockets.stronghold.robot.commands;
import org.robockets.stronghold.robot.highgoalshooter.MoveHood;
import org.robockets.stronghold.robot.highgoalshooter.MoveTurnTable;
import org.robockets.stronghold.robot.intake.IntakesUp;
import edu.wpi.first.wpilibj.command.CommandGroup;
/**
*
*/
public class DrivePosition extends CommandGroup {
public DrivePosition() {
addSequential(new MoveHood(-70));
addSequential(new IntakesUp());
addSequential(new MoveTurnTable(0));
}
}
|
package org.robockets.stronghold.robot.commands;
import org.robockets.stronghold.robot.highgoalshooter.MoveHood;
import org.robockets.stronghold.robot.highgoalshooter.MoveTurnTable;
import org.robockets.stronghold.robot.intake.IntakesUp;
import edu.wpi.first.wpilibj.command.CommandGroup;
/**
 * Moves the robot into the position for driving around, shooting, and intaking balls
*/
public class DrivePosition extends CommandGroup {
public DrivePosition() {
addSequential(new MoveHood(-70));
addSequential(new IntakesUp());
addSequential(new MoveTurnTable(0));
}
}
|
Add javadocs for drive position
|
Add javadocs for drive position
|
Java
|
mit
|
Team4761/2016-Robot-Code
|
java
|
## Code Before:
package org.robockets.stronghold.robot.commands;
import org.robockets.stronghold.robot.highgoalshooter.MoveHood;
import org.robockets.stronghold.robot.highgoalshooter.MoveTurnTable;
import org.robockets.stronghold.robot.intake.IntakesUp;
import edu.wpi.first.wpilibj.command.CommandGroup;
/**
*
*/
public class DrivePosition extends CommandGroup {
public DrivePosition() {
addSequential(new MoveHood(-70));
addSequential(new IntakesUp());
addSequential(new MoveTurnTable(0));
}
}
## Instruction:
Add javadocs for drive position
## Code After:
package org.robockets.stronghold.robot.commands;
import org.robockets.stronghold.robot.highgoalshooter.MoveHood;
import org.robockets.stronghold.robot.highgoalshooter.MoveTurnTable;
import org.robockets.stronghold.robot.intake.IntakesUp;
import edu.wpi.first.wpilibj.command.CommandGroup;
/**
 * Moves the robot into the position for driving around, shooting, and intaking balls
*/
public class DrivePosition extends CommandGroup {
public DrivePosition() {
addSequential(new MoveHood(-70));
addSequential(new IntakesUp());
addSequential(new MoveTurnTable(0));
}
}
|
// ... existing code ...
import edu.wpi.first.wpilibj.command.CommandGroup;
/**
 * Moves the robot into the position for driving around, shooting, and intaking balls
*/
public class DrivePosition extends CommandGroup {
// ... rest of the code ...
|
24d4fee92c1c2ff4bac1fe09d9b436748234a48c
|
main.py
|
main.py
|
import sys
import argparse
from server import xml_server
from connection import http_connection
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=int, default=8080,
help="server's HTTP port")
parser.add_argument('--sensordata', type=str,
default='server/sensor_data.csv', help="sensor data file")
if __name__ == '__main__':
args = parser.parse_args()
server = xml_server.XMLServer(args.sensordata)
connection = http_connection.HttpConnection(server, port=args.port)
|
import sys
import argparse
from server import xml_server, defective_servers
from connection import http_connection
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=int, default=8080,
help="server's HTTP port")
parser.add_argument('--sensordata', type=str,
default='server/sensor_data.csv', help="sensor data file")
parser.add_argument('--randomloss', action='store_true')
if __name__ == '__main__':
args = parser.parse_args()
if args.randomloss:
server = defective_servers.RandomLossXMLServer(args.sensordata)
else:
server = xml_server.XMLServer(args.sensordata)
connection = http_connection.HttpConnection(server, port=args.port)
|
Add argument for execution of defective server.
|
Add argument for execution of defective server.
|
Python
|
apache-2.0
|
Solucionamos/dummybmc
|
python
|
## Code Before:
import sys
import argparse
from server import xml_server
from connection import http_connection
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=int, default=8080,
help="server's HTTP port")
parser.add_argument('--sensordata', type=str,
default='server/sensor_data.csv', help="sensor data file")
if __name__ == '__main__':
args = parser.parse_args()
server = xml_server.XMLServer(args.sensordata)
connection = http_connection.HttpConnection(server, port=args.port)
## Instruction:
Add argument for execution of defective server.
## Code After:
import sys
import argparse
from server import xml_server, defective_servers
from connection import http_connection
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port', type=int, default=8080,
help="server's HTTP port")
parser.add_argument('--sensordata', type=str,
default='server/sensor_data.csv', help="sensor data file")
parser.add_argument('--randomloss', action='store_true')
if __name__ == '__main__':
args = parser.parse_args()
if args.randomloss:
server = defective_servers.RandomLossXMLServer(args.sensordata)
else:
server = xml_server.XMLServer(args.sensordata)
connection = http_connection.HttpConnection(server, port=args.port)
|
...
import sys
import argparse
from server import xml_server, defective_servers
from connection import http_connection
parser = argparse.ArgumentParser()
...
help="server's HTTP port")
parser.add_argument('--sensordata', type=str,
default='server/sensor_data.csv', help="sensor data file")
parser.add_argument('--randomloss', action='store_true')
if __name__ == '__main__':
args = parser.parse_args()
if args.randomloss:
server = defective_servers.RandomLossXMLServer(args.sensordata)
else:
server = xml_server.XMLServer(args.sensordata)
connection = http_connection.HttpConnection(server, port=args.port)
...
|
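A minimal sketch of the random-loss idea behind the --randomloss flag above; the base-class interface and the drop behaviour here are assumptions for illustration, not the project's actual RandomLossXMLServer:
import random
class BaseServer:
    """Stand-in for the project's XMLServer interface (assumed)."""
    def handle_request(self, request):
        return "<response>ok</response>"
class RandomLossServer(BaseServer):
    """Drops a fraction of responses at random to simulate a defective server."""
    def __init__(self, loss_rate=0.3, seed=None):
        self.loss_rate = loss_rate
        self.rng = random.Random(seed)
    def handle_request(self, request):
        if self.rng.random() < self.loss_rate:
            return None  # response "lost"
        return super().handle_request(request)
if __name__ == "__main__":
    server = RandomLossServer(loss_rate=0.5, seed=42)
    replies = [server.handle_request("ping") for _ in range(10)]
    print(sum(r is None for r in replies), "of 10 responses dropped")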
c86a84d3beed29c4f8389b1172fde95f72d2d6ed
|
src/main/java/org/junit/experimental/categories/CategoryFilterFactory.java
|
src/main/java/org/junit/experimental/categories/CategoryFilterFactory.java
|
package org.junit.experimental.categories;
import java.util.ArrayList;
import java.util.List;
import org.junit.internal.Classes;
import org.junit.runner.FilterFactory;
import org.junit.runner.FilterFactoryParams;
import org.junit.runner.manipulation.Filter;
/**
* Implementation of FilterFactory for Category filtering.
*/
abstract class CategoryFilterFactory implements FilterFactory {
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given a
* {@link FilterFactoryParams} argument.
*
* @param params Parameters needed to create the {@link Filter}
* @throws FilterNotCreatedException
*/
@Override
public Filter createFilter(FilterFactoryParams params) throws FilterNotCreatedException {
try {
return createFilter(parseCategories(params.getArgs()));
} catch (ClassNotFoundException e) {
throw new FilterNotCreatedException(e);
}
}
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given an array of classes.
*
* @param categories Category classes.
*/
protected abstract Filter createFilter(Class<?>... categories);
private Class<?>[] parseCategories(String categories) throws ClassNotFoundException {
List<Class<?>> categoryClasses = new ArrayList<Class<?>>();
for (String category : categories.split(",")) {
Class<?> categoryClass = Classes.getClass(category);
categoryClasses.add(categoryClass);
}
return categoryClasses.toArray(new Class[]{});
}
}
|
package org.junit.experimental.categories;
import java.util.ArrayList;
import java.util.List;
import org.junit.internal.Classes;
import org.junit.runner.FilterFactory;
import org.junit.runner.FilterFactoryParams;
import org.junit.runner.manipulation.Filter;
/**
* Implementation of FilterFactory for Category filtering.
*/
abstract class CategoryFilterFactory implements FilterFactory {
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given a
* {@link FilterFactoryParams} argument.
*
* @param params Parameters needed to create the {@link Filter}
* @throws FilterNotCreatedException
*/
public Filter createFilter(FilterFactoryParams params) throws FilterNotCreatedException {
try {
return createFilter(parseCategories(params.getArgs()));
} catch (ClassNotFoundException e) {
throw new FilterNotCreatedException(e);
}
}
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given an array of classes.
*
* @param categories Category classes.
*/
protected abstract Filter createFilter(Class<?>... categories);
private Class<?>[] parseCategories(String categories) throws ClassNotFoundException {
List<Class<?>> categoryClasses = new ArrayList<Class<?>>();
for (String category : categories.split(",")) {
Class<?> categoryClass = Classes.getClass(category);
categoryClasses.add(categoryClass);
}
return categoryClasses.toArray(new Class[]{});
}
}
|
Make compilable under Java 1.5.
|
Make compilable under Java 1.5.
|
Java
|
epl-1.0
|
alb-i986/junit,UnimibSoftEngCourse1516/lab2-es3-s.renzo,onesfreedom/junit,janocat/junit,larrychen1990/junit,UnimibSoftEngCourse1516/lab2-es3-m.scarpone,UnimibSoftEngCourse1516/lab2-es3-f.cirelli1,UnimibSoftEngCourse1516/lab2-es3-f.spinardi,remus32/junit,Thothius/junit,mnk/junit,jhfjhfj1/junit,yusuke/junit,UnimibSoftEngCourse1516/lab2-es3-e.nani1,UnimibSoftEngCourse1516/lab2-es3-f.cirelli1,UnimibSoftEngCourse1516/lab2-es3-a.mosini,julien-sobczak/junit,eamonnmcmanus/junit,easyMan-zzy/junit,UnimibSoftEngCourse1516/lab2-es3-f.giannini3,vorburger/junit,vorburger/junit,junit-team/junit4,sposam/junit,jordancheah/junit,baev/junit,junit-team/junit4,laercioferracini/junit,stefanbirkner/junit,paulduffin/junit,UnimibSoftEngCourse1516/lab2-es3-f.cirelli1,UrsMetz/junit,freezhan/junit,hhariri/junit,mekwin87/junit4,dvberkel/junit,paulduffin/junit,dvberkel/junit,AxelMonroyX/junit4,panchenko/junit,mekwin87/junit4,powazny/junit4,avandeursen/junit,quentin9696/junit,openhardnudd/junit,UnimibSoftEngCourse1516/lab2-es3-s.ravetta,panchenko/junit,eamonnmcmanus/junit,kobe73er/MyUnit,UrsMetz/junit,Siddartha07/junit,chrisvest/junit,VikingDen/junit,UnimibSoftEngCourse1516/lab2-es3-s.ravetta,janocat/junit,slezier/junit,UnimibSoftEngCourse1516/lab2-es3-o.sertori,junit-team/junit,vorburger/junit,stefanbirkner/junit,nathanchen/JUnitCodeReading,kcooney/junit,hansjoachim/junit,smayoorans/junit,rwarren14/junit,UnimibSoftEngCourse1516/lab2-es3-a.tundo,UnimibSoftEngCourse1516/lab2-es3-e.nani1,mnk/junit,rws-github/junit,MingxuanChen/junit,larrychen1990/junit,Siddartha07/junit,witcxc/junit,marcphilipp/junit,laercioferracini/junit,UrsMetz/junit,mekwin87/junit4,y-kt/junit,junit-team/junit,kobe73er/MyUnit,kcooney/junit,powazny/junit4,witcxc/junit,baev/junit,schauder/junit,witcxc/junit,paulduffin/junit,freezhan/junit,chrisvest/junit,alohageck0/junit,feisuo/junit,dvberkel/junit,onesfreedom/junit,rws-github/junit,UnimibSoftEngCourse1516/lab2-es3-a.mosini,UnimibSoftEngCourse1516/lab2-es3-m.polonioli,adko-pl/junit,slezier/junit,0359xiaodong/junit,UnimibSoftEngCourse1516/lab2-es3-m.vella6,ashleyfrieze/junit,hansjoachim/junit,ashleyfrieze/junit,UnimibSoftEngCourse1516/lab2-es3-m.scarpone,UnimibSoftEngCourse1516/lab2-es3-e.nani1,vorburger/junit,smayoorans/junit,UnimibSoftEngCourse1516/lab2-es3-i.pigazzini,julien-sobczak/junit,alohageck0/junit,UnimibSoftEngCourse1516/lab2-es3-m.polonioli,MichaelJY91/junit,elijah513/junit,MichaelJY91/junit,hhariri/junit,UnimibSoftEngCourse1516/lab2-es3-i.pigazzini,UnimibSoftEngCourse1516/lab2-es3-f.giannini3,Clairebi/JUnit-Clone,Thothius/junit,JoaquinSiabra/junit,nathanchen/JUnitCodeReading,remus32/junit,onesfreedom/junit,UnimibSoftEngCourse1516/lab2-es3-l.salvestrini,avandeursen/junit,junit-team/junit4,flomotlik/junit,AxelMonroyX/junit4,moinuddin14/junit,baev/junit,quentin9696/junit,yusuke/junit,UnimibSoftEngCourse1516/lab2-es3-l.salvestrini,VikingDen/junit,GeeChao/junit,feisuo/junit,larrychen1990/junit,laercioferracini/junit,1234-/junit,UnimibSoftEngCourse1516/lab2-es3-m.vella6,mnk/junit,jordancheah/junit,UnimibSoftEngCourse1516/lab2-es3-o.sertori,stefanbirkner/junit,alohageck0/junit,UnimibSoftEngCourse1516/lab2-es3-o.sertori,schauder/junit,UnimibSoftEngCourse1516/lab2-es3-m.polonioli,edwardmlyte/junit,feisuo/junit,remus32/junit,MingxuanChen/junit,hansjoachim/junit,UnimibSoftEngCourse1516/lab2-es3-s.renzo,jordancheah/junit,Clairebi/JUnit-Clone,UrsMetz/junit,AxelMonroyX/junit4,jhfjhfj1/junit,MingxuanChen/junit,cherryleer/junit,GeeChao/junit,sposam/junit,junit-team/junit,VikingDen/junit,GeeChao/junit,jhfjhfj1
/junit,smayoorans/junit,edwardmlyte/junit,MichaelJY91/junit,edwardmlyte/junit,elijah513/junit,Clairebi/JUnit-Clone,alb-i986/junit,1234-/junit,nathanchen/JUnitCodeReading,UnimibSoftEngCourse1516/lab2-es3-a.mosini,JoaquinSiabra/junit,rwarren14/junit,ashleyfrieze/junit,JoaquinSiabra/junit,easyMan-zzy/junit,UnimibSoftEngCourse1516/lab2-es3-f.giannini3,Thothius/junit,hhariri/junit,rws-github/junit,rws-github/junit,marcphilipp/junit,UnimibSoftEngCourse1516/lab2-es3-m.vella6,UnimibSoftEngCourse1516/lab2-es3-i.pigazzini,UnimibSoftEngCourse1516/lab2-es3-s.renzo,UnimibSoftEngCourse1516/lab2-es3-a.tundo,janocat/junit,quentin9696/junit,sposam/junit,yusuke/junit,nathanchen/JUnitCodeReading,freezhan/junit,kcooney/junit,0359xiaodong/junit,schauder/junit,flomotlik/junit,y-kt/junit,0359xiaodong/junit,kobe73er/MyUnit,flomotlik/junit,marcphilipp/junit,cherryleer/junit,mekwin87/junit4,avandeursen/junit,moinuddin14/junit,UnimibSoftEngCourse1516/lab2-es3-f.spinardi,chrisvest/junit,UnimibSoftEngCourse1516/lab2-es3-f.spinardi,julien-sobczak/junit,rws-github/junit,moinuddin14/junit,songfj/junit,slezier/junit,UnimibSoftEngCourse1516/lab2-es3-m.scarpone,kobe73er/dUnit,1234-/junit,kobe73er/dUnit,powazny/junit4,adko-pl/junit,UnimibSoftEngCourse1516/lab2-es3-a.tundo,alb-i986/junit,UnimibSoftEngCourse1516/lab2-es3-s.ravetta,openhardnudd/junit,cherryleer/junit,Siddartha07/junit,eamonnmcmanus/junit,songfj/junit,adko-pl/junit,y-kt/junit,rwarren14/junit,songfj/junit,openhardnudd/junit,UnimibSoftEngCourse1516/lab2-es3-l.salvestrini,kobe73er/dUnit,panchenko/junit,elijah513/junit,easyMan-zzy/junit
|
java
|
## Code Before:
package org.junit.experimental.categories;
import java.util.ArrayList;
import java.util.List;
import org.junit.internal.Classes;
import org.junit.runner.FilterFactory;
import org.junit.runner.FilterFactoryParams;
import org.junit.runner.manipulation.Filter;
/**
* Implementation of FilterFactory for Category filtering.
*/
abstract class CategoryFilterFactory implements FilterFactory {
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given a
* {@link FilterFactoryParams} argument.
*
* @param params Parameters needed to create the {@link Filter}
* @throws FilterNotCreatedException
*/
@Override
public Filter createFilter(FilterFactoryParams params) throws FilterNotCreatedException {
try {
return createFilter(parseCategories(params.getArgs()));
} catch (ClassNotFoundException e) {
throw new FilterNotCreatedException(e);
}
}
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given an array of classes.
*
* @param categories Category classes.
*/
protected abstract Filter createFilter(Class<?>... categories);
private Class<?>[] parseCategories(String categories) throws ClassNotFoundException {
List<Class<?>> categoryClasses = new ArrayList<Class<?>>();
for (String category : categories.split(",")) {
Class<?> categoryClass = Classes.getClass(category);
categoryClasses.add(categoryClass);
}
return categoryClasses.toArray(new Class[]{});
}
}
## Instruction:
Make compilable under Java 1.5.
## Code After:
package org.junit.experimental.categories;
import java.util.ArrayList;
import java.util.List;
import org.junit.internal.Classes;
import org.junit.runner.FilterFactory;
import org.junit.runner.FilterFactoryParams;
import org.junit.runner.manipulation.Filter;
/**
* Implementation of FilterFactory for Category filtering.
*/
abstract class CategoryFilterFactory implements FilterFactory {
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given a
* {@link FilterFactoryParams} argument.
*
* @param params Parameters needed to create the {@link Filter}
* @throws FilterNotCreatedException
*/
public Filter createFilter(FilterFactoryParams params) throws FilterNotCreatedException {
try {
return createFilter(parseCategories(params.getArgs()));
} catch (ClassNotFoundException e) {
throw new FilterNotCreatedException(e);
}
}
/**
* Creates a {@link org.junit.experimental.categories.Categories.CategoryFilter} given an array of classes.
*
* @param categories Category classes.
*/
protected abstract Filter createFilter(Class<?>... categories);
private Class<?>[] parseCategories(String categories) throws ClassNotFoundException {
List<Class<?>> categoryClasses = new ArrayList<Class<?>>();
for (String category : categories.split(",")) {
Class<?> categoryClass = Classes.getClass(category);
categoryClasses.add(categoryClass);
}
return categoryClasses.toArray(new Class[]{});
}
}
|
...
* @param params Parameters needed to create the {@link Filter}
* @throws FilterNotCreatedException
*/
public Filter createFilter(FilterFactoryParams params) throws FilterNotCreatedException {
try {
return createFilter(parseCategories(params.getArgs()));
...
|
91fc886bf302f9850977c8d88abba3bffd51928b
|
tests/test_compliance.py
|
tests/test_compliance.py
|
import os.path
import nose.tools as nose
import pep8
def test_pep8():
'''all Python files should comply with PEP 8'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
style_guide = pep8.StyleGuide(quiet=True)
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
|
import os.path
import nose.tools as nose
import pep8
import radon.complexity as radon
def test_pep8():
'''all Python files should comply with PEP 8'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
style_guide = pep8.StyleGuide(quiet=True)
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
def test_complexity():
'''all Python functions should have a low cyclomatic complexity score'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
with open(file_path, 'r') as file:
blocks = radon.cc_visit(file.read())
for block in blocks:
complexity = block.complexity
test_doc = '{} ({}) should have a low complexity score'
test_complexity.__doc__ = test_doc.format(
block.name, file_path)
fail_msg = '{} ({}) has a complexity of {}'.format(
block.name, file_path, complexity)
yield nose.assert_less_equal, complexity, 10, fail_msg
|
Add test generator for function complexity
|
Add test generator for function complexity
|
Python
|
mit
|
caleb531/ssh-wp-backup,caleb531/ssh-wp-backup
|
python
|
## Code Before:
import os.path
import nose.tools as nose
import pep8
def test_pep8():
'''all Python files should comply with PEP 8'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
style_guide = pep8.StyleGuide(quiet=True)
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
## Instruction:
Add test generator for function complexity
## Code After:
import os.path
import nose.tools as nose
import pep8
import radon.complexity as radon
def test_pep8():
'''all Python files should comply with PEP 8'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
style_guide = pep8.StyleGuide(quiet=True)
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
def test_complexity():
'''all Python functions should have a low cyclomatic complexity score'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
with open(file_path, 'r') as file:
blocks = radon.cc_visit(file.read())
for block in blocks:
complexity = block.complexity
test_doc = '{} ({}) should have a low complexity score'
test_complexity.__doc__ = test_doc.format(
block.name, file_path)
fail_msg = '{} ({}) has a complexity of {}'.format(
block.name, file_path, complexity)
yield nose.assert_less_equal, complexity, 10, fail_msg
|
# ... existing code ...
import os.path
import nose.tools as nose
import pep8
import radon.complexity as radon
def test_pep8():
# ... modified code ...
total_errors = style_guide.input_file(file_path)
msg = '{} does not comply with PEP 8'.format(file_path)
yield nose.assert_equal, total_errors, 0, msg
def test_complexity():
'''all Python functions should have a low cyclomatic complexity score'''
for subdir_path, subdir_names, file_names in os.walk('.'):
if '.git' in subdir_names:
subdir_names.remove('.git')
for file_name in file_names:
file_path = os.path.join(subdir_path, file_name)
file_ext = os.path.splitext(file_name)[1]
if file_ext == '.py':
with open(file_path, 'r') as file:
blocks = radon.cc_visit(file.read())
for block in blocks:
complexity = block.complexity
test_doc = '{} ({}) should have a low complexity score'
test_complexity.__doc__ = test_doc.format(
block.name, file_path)
fail_msg = '{} ({}) has a complexity of {}'.format(
block.name, file_path, complexity)
yield nose.assert_less_equal, complexity, 10, fail_msg
# ... rest of the code ...
|
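For reference, the radon call used by the new test generator can also be exercised on its own (assuming the radon package is installed); cc_visit parses a source string and returns blocks carrying a name and a cyclomatic-complexity score:
import radon.complexity as radon
SOURCE = '''
def classify(x):
    if x < 0:
        return "negative"
    elif x == 0:
        return "zero"
    return "positive"
'''
for block in radon.cc_visit(SOURCE):
    # Each block exposes the function/class name and its complexity score.
    print(block.name, block.complexity)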
a4f41648cd0318694d551b067309539df475c2d7
|
tests/test_function_calls.py
|
tests/test_function_calls.py
|
from thinglang.runner import run
def test_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
|
from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip()
|
Test for method argument calls
|
Test for method argument calls
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
python
|
## Code Before:
from thinglang.runner import run
def test_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
## Instruction:
Test for method argument calls
## Code After:
from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip()
|
...
from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
...
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip()
...
|
3350085ed7177cdc387d162a71073b787ba401be
|
simulator/mips/mips.h
|
simulator/mips/mips.h
|
/**
* mips.h - all the aliases to MIPS ISA
* @author Aleksandr Misevich
* Copyright 2018 MIPT-MIPS
*/
#ifndef MIPS_H_
#define MIPS_H_
#include <infra/instrcache/instr_cache_memory.h>
#include "mips_instr.h"
struct MIPS
{
using FuncInstr = MIPSInstr;
using Register = MIPSRegister;
using Memory = InstrMemory<MIPSInstr>;
using RegisterUInt = uint32;
using RegDstUInt = uint64;
};
#endif // MIPS_H_
|
/**
* mips.h - all the aliases to MIPS ISA
* @author Aleksandr Misevich
* Copyright 2018 MIPT-MIPS
*/
#ifndef MIPS_H_
#define MIPS_H_
#include <infra/instrcache/instr_cache_memory.h>
#include "mips_instr.h"
struct MIPS
{
using FuncInstr = MIPSInstr;
using Register = MIPSRegister;
using Memory = InstrMemory<MIPSInstr>;
using RegisterUInt = uint32;
using RegDstUInt = doubled_t<uint32>; // MIPS may produce output to 2x HI/LO register
};
#endif // MIPS_H_
|
Use doubled_t for MIPS defines
|
Use doubled_t for MIPS defines
|
C
|
mit
|
MIPT-ILab/mipt-mips-2015,MIPT-ILab/mipt-mips-2015,MIPT-ILab/mipt-mips,MIPT-ILab/mipt-mips,MIPT-ILab/mipt-mips-2015,MIPT-ILab/mipt-mips-2015
|
c
|
## Code Before:
/**
* mips.h - all the aliases to MIPS ISA
* @author Aleksandr Misevich
* Copyright 2018 MIPT-MIPS
*/
#ifndef MIPS_H_
#define MIPS_H_
#include <infra/instrcache/instr_cache_memory.h>
#include "mips_instr.h"
struct MIPS
{
using FuncInstr = MIPSInstr;
using Register = MIPSRegister;
using Memory = InstrMemory<MIPSInstr>;
using RegisterUInt = uint32;
using RegDstUInt = uint64;
};
#endif // MIPS_H_
## Instruction:
Use doubled_t for MIPS defines
## Code After:
/**
* mips.h - all the aliases to MIPS ISA
* @author Aleksandr Misevich
* Copyright 2018 MIPT-MIPS
*/
#ifndef MIPS_H_
#define MIPS_H_
#include <infra/instrcache/instr_cache_memory.h>
#include "mips_instr.h"
struct MIPS
{
using FuncInstr = MIPSInstr;
using Register = MIPSRegister;
using Memory = InstrMemory<MIPSInstr>;
using RegisterUInt = uint32;
using RegDstUInt = doubled_t<uint32>; // MIPS may produce output to 2x HI/LO register
};
#endif // MIPS_H_
|
...
using Register = MIPSRegister;
using Memory = InstrMemory<MIPSInstr>;
using RegisterUInt = uint32;
using RegDstUInt = doubled_t<uint32>; // MIPS may produce output to 2x HI/LO register
};
#endif // MIPS_H_
...
|
84db08e99c16d5bf3e5c035880adb440e02099fd
|
butterknife-lint/src/main/java/butterknife/lint/LintRegistry.java
|
butterknife-lint/src/main/java/butterknife/lint/LintRegistry.java
|
package butterknife.lint;
import com.android.tools.lint.client.api.IssueRegistry;
import com.android.tools.lint.detector.api.Issue;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
* Contains references to all custom lint checks for butterknife.
*/
public class LintRegistry extends IssueRegistry {
@Override public List<Issue> getIssues() {
return ImmutableList.of(InvalidR2UsageDetector.ISSUE);
}
}
|
package butterknife.lint;
import com.android.tools.lint.client.api.IssueRegistry;
import com.android.tools.lint.detector.api.ApiKt;
import com.android.tools.lint.detector.api.Issue;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
* Contains references to all custom lint checks for butterknife.
*/
public class LintRegistry extends IssueRegistry {
@Override public List<Issue> getIssues() {
return ImmutableList.of(InvalidR2UsageDetector.ISSUE);
}
@Override public int getApi() {
return ApiKt.CURRENT_API;
}
}
|
Update Lint checks to new APIs
|
Update Lint checks to new APIs
|
Java
|
apache-2.0
|
JakeWharton/butterknife,ze-pequeno/butterknife,JakeWharton/butterknife,hzsweers/butterknife,ze-pequeno/butterknife,ze-pequeno/butterknife,JakeWharton/butterknife,hzsweers/butterknife,JakeWharton/butterknife,hzsweers/butterknife,ze-pequeno/butterknife
|
java
|
## Code Before:
package butterknife.lint;
import com.android.tools.lint.client.api.IssueRegistry;
import com.android.tools.lint.detector.api.Issue;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
* Contains references to all custom lint checks for butterknife.
*/
public class LintRegistry extends IssueRegistry {
@Override public List<Issue> getIssues() {
return ImmutableList.of(InvalidR2UsageDetector.ISSUE);
}
}
## Instruction:
Update Lint checks to new APIs
## Code After:
package butterknife.lint;
import com.android.tools.lint.client.api.IssueRegistry;
import com.android.tools.lint.detector.api.ApiKt;
import com.android.tools.lint.detector.api.Issue;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
* Contains references to all custom lint checks for butterknife.
*/
public class LintRegistry extends IssueRegistry {
@Override public List<Issue> getIssues() {
return ImmutableList.of(InvalidR2UsageDetector.ISSUE);
}
@Override public int getApi() {
return ApiKt.CURRENT_API;
}
}
|
# ... existing code ...
package butterknife.lint;
import com.android.tools.lint.client.api.IssueRegistry;
import com.android.tools.lint.detector.api.ApiKt;
import com.android.tools.lint.detector.api.Issue;
import com.google.common.collect.ImmutableList;
import java.util.List;
# ... modified code ...
@Override public List<Issue> getIssues() {
return ImmutableList.of(InvalidR2UsageDetector.ISSUE);
}
@Override public int getApi() {
return ApiKt.CURRENT_API;
}
}
# ... rest of the code ...
|
3586a75f52cc12950270437a7f50ef0c8149b5ae
|
src/main/java/openmods/container/FakeSlot.java
|
src/main/java/openmods/container/FakeSlot.java
|
package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
|
package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
onSlotChanged();
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
|
Fix update on fake slot
|
Fix update on fake slot
|
Java
|
mit
|
nevercast/OpenModsLib,OpenMods/OpenModsLib,OpenMods/OpenModsLib
|
java
|
## Code Before:
package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
## Instruction:
Fix update on fake slot
## Code After:
package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
onSlotChanged();
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
|
# ... existing code ...
}
inventory.setInventorySlotContents(slotNumber, place);
onSlotChanged();
return place;
}
# ... rest of the code ...
|
cc80f90a4f003c0967c31d5177971061350ee683
|
pycall/call.py
|
pycall/call.py
|
"""A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = int(wait_time)
self.max_retries = int(max_retries)
|
"""A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = wait_time
self.max_retries = max_retries
|
Revert "Forcing type coersion for int params."
|
Revert "Forcing type coersion for int params."
This is a pointless bit of code. Since we lazy-evaluate them anyhow, it's a
duplicate effort.
This reverts commit 1ca6b96d492f8f33ac3b3a520937378effb66744.
|
Python
|
unlicense
|
rdegges/pycall
|
python
|
## Code Before:
"""A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = int(wait_time)
self.max_retries = int(max_retries)
## Instruction:
Revert "Forcing type coersion for int params."
This is a pointless bit of code. Since we lazy-evaluate them anyhow, it's a
duplicate effort.
This reverts commit 1ca6b96d492f8f33ac3b3a520937378effb66744.
## Code After:
"""A simple wrapper for Asterisk calls."""
class Call(object):
"""Stores and manipulates Asterisk calls."""
def __init__(self, channel, callerid=None, account=None, wait_time=None,
max_retries=None):
"""Create a new `Call` object.
:param str channel: The Asterisk channel to call. Should be in standard
Asterisk format.
:param str callerid: CallerID to use.
:param str account: Account code to associate with this call.
:param int wait_time: Amount of time to wait (in seconds) between
retry attempts.
:param int max_retries: Maximum amount of retry attempts.
"""
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = wait_time
self.max_retries = max_retries
|
# ... existing code ...
self.channel = channel
self.callerid = callerid
self.account = account
self.wait_time = wait_time
self.max_retries = max_retries
# ... rest of the code ...
|
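The revert's rationale, coerce lazily where the value is actually used, can be sketched in isolation as follows; the render method and its output format are illustrative assumptions, not pycall's real API:
class LazyCall:
    def __init__(self, channel, wait_time=None, max_retries=None):
        self.channel = channel
        self.wait_time = wait_time        # stored as given, no eager int()
        self.max_retries = max_retries
    def render(self):
        lines = ["Channel: %s" % self.channel]
        if self.wait_time is not None:
            lines.append("WaitTime: %d" % int(self.wait_time))      # coerced only here
        if self.max_retries is not None:
            lines.append("MaxRetries: %d" % int(self.max_retries))  # coerced only here
        return "\n".join(lines)
print(LazyCall("SIP/flowroute/18882223333", wait_time="10", max_retries=2).render())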
28f504dccd02046604761e997f929015a285dffd
|
pyQuantuccia/tests/test_get_holiday_date.py
|
pyQuantuccia/tests/test_get_holiday_date.py
|
from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
|
from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
|
Add some bogus tests to try and get this info.
|
Add some bogus tests to try and get this info.
|
Python
|
bsd-3-clause
|
jwg4/pyQuantuccia,jwg4/pyQuantuccia
|
python
|
## Code Before:
from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
## Instruction:
Add some bogus tests to try and get this info.
## Code After:
from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
|
// ... existing code ...
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
// ... rest of the code ...
|
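The information those placeholder asserts are trying to surface, which calendar module actually gets imported and what it exports, can be printed directly; this is a generic illustration, not what the project necessarily did next:
import calendar
print(calendar.__name__)
print(getattr(calendar, "__file__", "<built-in or extension module>"))
print(sorted(attr for attr in dir(calendar) if not attr.startswith("_"))[:10])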
4688d48ceeb365174353ab710d03c39dda10a115
|
tssim/__init__.py
|
tssim/__init__.py
|
__author__ = """Franz Woellert"""
__email__ = '[email protected]'
__version__ = '0.1.0'
|
__author__ = """Franz Woellert"""
__email__ = '[email protected]'
__version__ = '0.1.0'
from tssim.core.series import TimeSeries
from tssim.core.function import TimeFunction
from tssim.core.track import TimeTrack
from tssim.functions import random
|
Adjust module and class references to be accessible from the package top level.
|
Adjust module and class references to be accessible from the package top level.
|
Python
|
mit
|
mansenfranzen/tssim
|
python
|
## Code Before:
__author__ = """Franz Woellert"""
__email__ = '[email protected]'
__version__ = '0.1.0'
## Instruction:
Adjust module and class references to be accessible from the package top level.
## Code After:
__author__ = """Franz Woellert"""
__email__ = '[email protected]'
__version__ = '0.1.0'
from tssim.core.series import TimeSeries
from tssim.core.function import TimeFunction
from tssim.core.track import TimeTrack
from tssim.functions import random
|
// ... existing code ...
__author__ = """Franz Woellert"""
__email__ = '[email protected]'
__version__ = '0.1.0'
from tssim.core.series import TimeSeries
from tssim.core.function import TimeFunction
from tssim.core.track import TimeTrack
from tssim.functions import random
// ... rest of the code ...
|
660f1f9265fd00f6ec8db16190293ed0b88481b8
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='emopy',
version='0.1',
description='Emotion Recognition Package for Python',
url='http://github.com/selameab/emopy',
author='Selameab',
author_email='[email protected]',
license='',
package_data={'emopy': ['models/*.h5', 'models/*.json']},
include_package_data=True,
packages=['emopy'],
dependency_links=["https://github.com/tensorflow/tensorflow/tarball/master"],
install_requires=[
'dlib',
'tensorflow',
'keras>=2.0'
],
zip_safe=False)
|
from setuptools import setup
setup(name='emopy',
version='0.1',
description='Emotion Recognition Package for Python',
url='http://github.com/selameab/emopy',
author='Selameab',
author_email='[email protected]',
license='',
package_data={'emopy': ['models/*.h5', 'models/*.json']},
include_package_data=True,
packages=['emopy'],
dependency_links=["https://github.com/tensorflow/tensorflow/tarball/master"],
install_requires=[
'dlib',
'tensorflow',
'keras>=2.0',
'h5py'
],
zip_safe=False)
|
Add h5py as a dependency
|
Add h5py as a dependency
|
Python
|
mit
|
Selameab/emopy
|
python
|
## Code Before:
from setuptools import setup
setup(name='emopy',
version='0.1',
description='Emotion Recognition Package for Python',
url='http://github.com/selameab/emopy',
author='Selameab',
author_email='[email protected]',
license='',
package_data={'emopy': ['models/*.h5', 'models/*.json']},
include_package_data=True,
packages=['emopy'],
dependency_links=["https://github.com/tensorflow/tensorflow/tarball/master"],
install_requires=[
'dlib',
'tensorflow',
'keras>=2.0'
],
zip_safe=False)
## Instruction:
Add h5py as a dependency
## Code After:
from setuptools import setup
setup(name='emopy',
version='0.1',
description='Emotion Recognition Package for Python',
url='http://github.com/selameab/emopy',
author='Selameab',
author_email='[email protected]',
license='',
package_data={'emopy': ['models/*.h5', 'models/*.json']},
include_package_data=True,
packages=['emopy'],
dependency_links=["https://github.com/tensorflow/tensorflow/tarball/master"],
install_requires=[
'dlib',
'tensorflow',
'keras>=2.0',
'h5py'
],
zip_safe=False)
|
# ... existing code ...
install_requires=[
'dlib',
'tensorflow',
'keras>=2.0',
'h5py'
],
zip_safe=False)
# ... rest of the code ...
|
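The new h5py requirement matches the .h5 model files listed in package_data, since Keras reads such weight files through h5py. A small self-contained check that h5py round-trips an HDF5 file (the file name is a throwaway placeholder):
import h5py
with h5py.File("demo_weights.h5", "w") as f:
    f.create_dataset("layer_0/kernel", data=[[0.1, 0.2], [0.3, 0.4]])
with h5py.File("demo_weights.h5", "r") as f:
    print(list(f["layer_0"]))   # ['kernel']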
c1343c392a45d2069b893841f82bf426462bef55
|
threadmanager.py
|
threadmanager.py
|
import logsupport
from logsupport import ConsoleWarning
HelperThreads = {}
class ThreadItem(object):
def __init__(self, name, start, restart):
self.name = name
self.StartThread = start
self.RestartThread = restart
self.Thread = None
def CheckThreads():
for T in HelperThreads.values():
if not T.Thread.is_alive():
logsupport.Logs.Log("Thread for: "+T.name+" died; restarting",severity=ConsoleWarning)
T.RestartThread()
def StartThreads():
for T in HelperThreads.values():
T.StartThread()
logsupport.Logs.Log("Starting helper thread for: ", T.name)
|
import logsupport
from logsupport import ConsoleWarning
HelperThreads = {}
class ThreadItem(object):
def __init__(self, name, start, restart):
self.name = name
self.StartThread = start
self.RestartThread = restart
self.Thread = None
def StopThread(self):
self.Thread.stop()
def CheckThreads():
for T in HelperThreads.values():
if not T.Thread.is_alive():
logsupport.Logs.Log("Thread for: "+T.name+" died; restarting",severity=ConsoleWarning)
T.Thread = T.RestartThread(T)
def StartThreads():
for T in HelperThreads.values():
logsupport.Logs.Log("Starting helper thread for: ", T.name)
T.Thread = T.StartThread()
|
Add a stop thread - may be needed for loss of heartbeat case
|
Add a stop thread - may be needed for loss of heartbeat case
|
Python
|
apache-2.0
|
kevinkahn/softconsole,kevinkahn/softconsole
|
python
|
## Code Before:
import logsupport
from logsupport import ConsoleWarning
HelperThreads = {}
class ThreadItem(object):
def __init__(self, name, start, restart):
self.name = name
self.StartThread = start
self.RestartThread = restart
self.Thread = None
def CheckThreads():
for T in HelperThreads.values():
if not T.Thread.is_alive():
logsupport.Logs.Log("Thread for: "+T.name+" died; restarting",severity=ConsoleWarning)
T.RestartThread()
def StartThreads():
for T in HelperThreads.values():
T.StartThread()
logsupport.Logs.Log("Starting helper thread for: ", T.name)
## Instruction:
Add a stop thread - may be needed for loss of heartbeat case
## Code After:
import logsupport
from logsupport import ConsoleWarning
HelperThreads = {}
class ThreadItem(object):
def __init__(self, name, start, restart):
self.name = name
self.StartThread = start
self.RestartThread = restart
self.Thread = None
def StopThread(self):
self.Thread.stop()
def CheckThreads():
for T in HelperThreads.values():
if not T.Thread.is_alive():
logsupport.Logs.Log("Thread for: "+T.name+" died; restarting",severity=ConsoleWarning)
T.Thread = T.RestartThread(T)
def StartThreads():
for T in HelperThreads.values():
logsupport.Logs.Log("Starting helper thread for: ", T.name)
T.Thread = T.StartThread()
|
...
self.RestartThread = restart
self.Thread = None
def StopThread(self):
self.Thread.stop()
def CheckThreads():
for T in HelperThreads.values():
if not T.Thread.is_alive():
logsupport.Logs.Log("Thread for: "+T.name+" died; restarting",severity=ConsoleWarning)
T.Thread = T.RestartThread(T)
def StartThreads():
for T in HelperThreads.values():
logsupport.Logs.Log("Starting helper thread for: ", T.name)
T.Thread = T.StartThread()
...
|
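The registry-plus-health-check pattern in the module above boils down to the following self-contained sketch (the worker body and polling interval are placeholders chosen for the example):
import threading
import time
def start_worker():
    t = threading.Thread(target=lambda: time.sleep(0.2), daemon=True)
    t.start()
    return t
registry = {"demo": {"start": start_worker, "thread": None}}
for name, item in registry.items():          # initial start
    item["thread"] = item["start"]()
for _ in range(3):                           # periodic check, restart dead threads
    time.sleep(0.3)
    for name, item in registry.items():
        if not item["thread"].is_alive():
            print("restarting helper thread:", name)
            item["thread"] = item["start"]()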
5b7bc8baba35bc816c7dc94768d9fae05c7b78ec
|
zephyr/shim/include/zephyr_host_command.h
|
zephyr/shim/include/zephyr_host_command.h
|
/* Copyright 2021 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#if !defined(__CROS_EC_HOST_COMMAND_H) || \
defined(__CROS_EC_ZEPHYR_HOST_COMMAND_H)
#error "This file must only be included from host_command.h. " \
"Include host_command.h directly"
#endif
#define __CROS_EC_ZEPHYR_HOST_COMMAND_H
#include <init.h>
#ifdef CONFIG_PLATFORM_EC_HOSTCMD
/**
* See include/host_command.h for documentation.
*/
#define DECLARE_HOST_COMMAND(_command, _routine, _version_mask) \
STRUCT_SECTION_ITERABLE(host_command, _cros_hcmd_##_command) = { \
.command = _command, \
.handler = _routine, \
.version_mask = _version_mask, \
}
#else /* !CONFIG_PLATFORM_EC_HOSTCMD */
#ifdef __clang__
#define DECLARE_HOST_COMMAND(command, routine, version_mask)
#else
#define DECLARE_HOST_COMMAND(command, routine, version_mask) \
enum ec_status (routine)(struct host_cmd_handler_args *args) \
__attribute__((unused))
#endif /* __clang__ */
#endif /* CONFIG_PLATFORM_EC_HOSTCMD */
|
/* Copyright 2021 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#if !defined(__CROS_EC_HOST_COMMAND_H) || \
defined(__CROS_EC_ZEPHYR_HOST_COMMAND_H)
#error "This file must only be included from host_command.h. " \
"Include host_command.h directly"
#endif
#define __CROS_EC_ZEPHYR_HOST_COMMAND_H
#include <init.h>
#ifdef CONFIG_PLATFORM_EC_HOSTCMD
/**
* See include/host_command.h for documentation.
*/
#define DECLARE_HOST_COMMAND(_command, _routine, _version_mask) \
STRUCT_SECTION_ITERABLE(host_command, _cros_hcmd_##_command) = { \
.command = _command, \
.handler = _routine, \
.version_mask = _version_mask, \
}
#else /* !CONFIG_PLATFORM_EC_HOSTCMD */
/*
* Create a fake routine to call the function. The linker should
* garbage-collect it since it is behind 'if (0)'
*/
#define DECLARE_HOST_COMMAND(command, routine, version_mask) \
int __remove_ ## command(void) \
{ \
if (0) \
routine(NULL); \
return 0; \
}
#endif /* CONFIG_PLATFORM_EC_HOSTCMD */
|
Use a different way of handling no host commands
|
zephyr: Use a different way of handling no host commands
When CONFIG_PLATFORM_EC_HOSTCMD is not enabled we want to silently drop
the handler routines from the build. The current approach works for gcc
but not for clang.
Use an exported function instead.
BUG=b:208648337
BRANCH=none
TEST=CQ and gitlab
Signed-off-by: Simon Glass <[email protected]>
Change-Id: I63f74e8081556c726472782f60bddbbfbc3e9bf0
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3313320
Reviewed-by: Jeremy Bettis <[email protected]>
|
C
|
bsd-3-clause
|
coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec,coreboot/chrome-ec
|
c
|
## Code Before:
/* Copyright 2021 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#if !defined(__CROS_EC_HOST_COMMAND_H) || \
defined(__CROS_EC_ZEPHYR_HOST_COMMAND_H)
#error "This file must only be included from host_command.h. " \
"Include host_command.h directly"
#endif
#define __CROS_EC_ZEPHYR_HOST_COMMAND_H
#include <init.h>
#ifdef CONFIG_PLATFORM_EC_HOSTCMD
/**
* See include/host_command.h for documentation.
*/
#define DECLARE_HOST_COMMAND(_command, _routine, _version_mask) \
STRUCT_SECTION_ITERABLE(host_command, _cros_hcmd_##_command) = { \
.command = _command, \
.handler = _routine, \
.version_mask = _version_mask, \
}
#else /* !CONFIG_PLATFORM_EC_HOSTCMD */
#ifdef __clang__
#define DECLARE_HOST_COMMAND(command, routine, version_mask)
#else
#define DECLARE_HOST_COMMAND(command, routine, version_mask) \
enum ec_status (routine)(struct host_cmd_handler_args *args) \
__attribute__((unused))
#endif /* __clang__ */
#endif /* CONFIG_PLATFORM_EC_HOSTCMD */
## Instruction:
zephyr: Use a different way of handling no host commands
When CONFIG_PLATFORM_EC_HOSTCMD is not enabled we want to silently drop
the handler routines from the build. The current approach works for gcc
but not for clang.
Use an exported function instead.
BUG=b:208648337
BRANCH=none
TEST=CQ and gitlab
Signed-off-by: Simon Glass <[email protected]>
Change-Id: I63f74e8081556c726472782f60bddbbfbc3e9bf0
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/ec/+/3313320
Reviewed-by: Jeremy Bettis <[email protected]>
## Code After:
/* Copyright 2021 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#if !defined(__CROS_EC_HOST_COMMAND_H) || \
defined(__CROS_EC_ZEPHYR_HOST_COMMAND_H)
#error "This file must only be included from host_command.h. " \
"Include host_command.h directly"
#endif
#define __CROS_EC_ZEPHYR_HOST_COMMAND_H
#include <init.h>
#ifdef CONFIG_PLATFORM_EC_HOSTCMD
/**
* See include/host_command.h for documentation.
*/
#define DECLARE_HOST_COMMAND(_command, _routine, _version_mask) \
STRUCT_SECTION_ITERABLE(host_command, _cros_hcmd_##_command) = { \
.command = _command, \
.handler = _routine, \
.version_mask = _version_mask, \
}
#else /* !CONFIG_PLATFORM_EC_HOSTCMD */
/*
* Create a fake routine to call the function. The linker should
* garbage-collect it since it is behind 'if (0)'
*/
#define DECLARE_HOST_COMMAND(command, routine, version_mask) \
int __remove_ ## command(void) \
{ \
if (0) \
routine(NULL); \
return 0; \
}
#endif /* CONFIG_PLATFORM_EC_HOSTCMD */
|
// ... existing code ...
.version_mask = _version_mask, \
}
#else /* !CONFIG_PLATFORM_EC_HOSTCMD */
/*
* Create a fake routine to call the function. The linker should
* garbage-collect it since it is behind 'if (0)'
*/
#define DECLARE_HOST_COMMAND(command, routine, version_mask) \
int __remove_ ## command(void) \
{ \
if (0) \
routine(NULL); \
return 0; \
}
#endif /* CONFIG_PLATFORM_EC_HOSTCMD */
// ... rest of the code ...
|
c801a015d1bd47060c1cfdfcecaac085de4b7fcf
|
integration-tests/src/test/java/arez/integration/observable_component/ObservableComponentIntegrationTest.java
|
integration-tests/src/test/java/arez/integration/observable_component/ObservableComponentIntegrationTest.java
|
package arez.integration.observable_component;
import arez.Arez;
import arez.Disposable;
import arez.annotations.ArezComponent;
import arez.annotations.Feature;
import arez.component.ComponentObservable;
import arez.integration.AbstractArezIntegrationTest;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class ObservableComponentIntegrationTest
extends AbstractArezIntegrationTest
{
@Test
public void disposeNotifiesComponentObservable()
throws Throwable
{
final Model model = Model.create();
final AtomicInteger callCount = new AtomicInteger();
Arez.context().autorun( () -> {
observeADependency();
ComponentObservable.observe( model );
callCount.incrementAndGet();
} );
assertEquals( callCount.get(), 1 );
Disposable.dispose( model );
assertEquals( callCount.get(), 2 );
}
@Test
public void disposeNoNotifiesWhenNotComponentObservable()
throws Throwable
{
final Model2 model = Model2.create();
final AtomicInteger callCount = new AtomicInteger();
Arez.context().autorun( () -> {
observeADependency();
ComponentObservable.observe( model );
callCount.incrementAndGet();
} );
assertEquals( callCount.get(), 1 );
Disposable.dispose( model );
assertEquals( callCount.get(), 1 );
}
@ArezComponent( allowEmpty = true, observable = Feature.ENABLE )
static abstract class Model
{
@Nonnull
static Model create()
{
return new ObservableComponentIntegrationTest_Arez_Model();
}
}
@ArezComponent( allowEmpty = true, observable = Feature.DISABLE )
static abstract class Model2
{
@Nonnull
static Model2 create()
{
return new ObservableComponentIntegrationTest_Arez_Model2();
}
}
}
|
package arez.integration.observable_component;
import arez.Arez;
import arez.Disposable;
import arez.annotations.ArezComponent;
import arez.annotations.Feature;
import arez.component.ComponentObservable;
import arez.integration.AbstractArezIntegrationTest;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class ObservableComponentIntegrationTest
extends AbstractArezIntegrationTest
{
@Test
public void disposeNotifiesComponentObservable()
throws Throwable
{
final Model model = Model.create();
final AtomicInteger callCount = new AtomicInteger();
Arez.context().autorun( () -> {
observeADependency();
ComponentObservable.observe( model );
callCount.incrementAndGet();
} );
assertEquals( callCount.get(), 1 );
Disposable.dispose( model );
assertEquals( callCount.get(), 2 );
}
@ArezComponent( allowEmpty = true, observable = Feature.ENABLE )
static abstract class Model
{
@Nonnull
static Model create()
{
return new ObservableComponentIntegrationTest_Arez_Model();
}
}
}
|
Remove test that no longer makes any sense
|
Remove test that no longer makes any sense
|
Java
|
apache-2.0
|
realityforge/arez,realityforge/arez,realityforge/arez
|
java
|
## Code Before:
package arez.integration.observable_component;
import arez.Arez;
import arez.Disposable;
import arez.annotations.ArezComponent;
import arez.annotations.Feature;
import arez.component.ComponentObservable;
import arez.integration.AbstractArezIntegrationTest;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class ObservableComponentIntegrationTest
extends AbstractArezIntegrationTest
{
@Test
public void disposeNotifiesComponentObservable()
throws Throwable
{
final Model model = Model.create();
final AtomicInteger callCount = new AtomicInteger();
Arez.context().autorun( () -> {
observeADependency();
ComponentObservable.observe( model );
callCount.incrementAndGet();
} );
assertEquals( callCount.get(), 1 );
Disposable.dispose( model );
assertEquals( callCount.get(), 2 );
}
@Test
public void disposeNoNotifiesWhenNotComponentObservable()
throws Throwable
{
final Model2 model = Model2.create();
final AtomicInteger callCount = new AtomicInteger();
Arez.context().autorun( () -> {
observeADependency();
ComponentObservable.observe( model );
callCount.incrementAndGet();
} );
assertEquals( callCount.get(), 1 );
Disposable.dispose( model );
assertEquals( callCount.get(), 1 );
}
@ArezComponent( allowEmpty = true, observable = Feature.ENABLE )
static abstract class Model
{
@Nonnull
static Model create()
{
return new ObservableComponentIntegrationTest_Arez_Model();
}
}
@ArezComponent( allowEmpty = true, observable = Feature.DISABLE )
static abstract class Model2
{
@Nonnull
static Model2 create()
{
return new ObservableComponentIntegrationTest_Arez_Model2();
}
}
}
## Instruction:
Remove test that no longer makes any sense
## Code After:
package arez.integration.observable_component;
import arez.Arez;
import arez.Disposable;
import arez.annotations.ArezComponent;
import arez.annotations.Feature;
import arez.component.ComponentObservable;
import arez.integration.AbstractArezIntegrationTest;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class ObservableComponentIntegrationTest
extends AbstractArezIntegrationTest
{
@Test
public void disposeNotifiesComponentObservable()
throws Throwable
{
final Model model = Model.create();
final AtomicInteger callCount = new AtomicInteger();
Arez.context().autorun( () -> {
observeADependency();
ComponentObservable.observe( model );
callCount.incrementAndGet();
} );
assertEquals( callCount.get(), 1 );
Disposable.dispose( model );
assertEquals( callCount.get(), 2 );
}
@ArezComponent( allowEmpty = true, observable = Feature.ENABLE )
static abstract class Model
{
@Nonnull
static Model create()
{
return new ObservableComponentIntegrationTest_Arez_Model();
}
}
}
|
// ... existing code ...
assertEquals( callCount.get(), 2 );
}
@ArezComponent( allowEmpty = true, observable = Feature.ENABLE )
static abstract class Model
{
// ... modified code ...
return new ObservableComponentIntegrationTest_Arez_Model();
}
}
}
// ... rest of the code ...
|
e8d321c35d6e0a8294e0766c3836efe192ae2df0
|
print_items_needing_requeue.py
|
print_items_needing_requeue.py
|
import os
import sys
try:
import simplejson as json
except ImportError:
import json
basename = os.path.basename
def main():
basedirs = sys.argv[1:]
valids = set()
invalids = set()
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
print "Skipping dotdir %r" % (directory,)
continue
for f in filenames:
if f.startswith("."):
print "Skipping dotfile %r" % (f,)
continue
fname = os.path.join(directory, f)
if fname.endswith(".verification"):
with open(fname, "rb") as fh:
for line in fh:
data = json.loads(line)
if data["valid"]:
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
needs_requeue = sorted(invalids - valids)
for item_name in needs_requeue:
print item_name
if __name__ == '__main__':
main()
|
import os
import sys
try:
import simplejson as json
except ImportError:
import json
basename = os.path.basename
def main():
greader_items = sys.argv[1]
basedirs = sys.argv[2:]
assert basedirs, "Give me some basedirs containing .verification files"
valids = set()
invalids = set()
largest = 0
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
continue
for f in filenames:
if f.startswith("."):
continue
fname = os.path.join(directory, f)
if fname.endswith(".verification"):
with open(fname, "rb") as fh:
for line in fh:
data = json.loads(line)
if data["valid"]:
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
largest = max(largest, int(data["item_name"], 10))
for n in xrange(largest):
item_name = str(n).zfill(10)
if not item_name in valids and os.path.exists(greader_items + '/' + item_name[:6] + '/' + item_name + '.gz'):
print item_name
if __name__ == '__main__':
main()
|
Print items that are bad *or* missing
|
Print items that are bad *or* missing
|
Python
|
mit
|
ludios/greader-warc-checker
|
python
|
## Code Before:
import os
import sys
try:
import simplejson as json
except ImportError:
import json
basename = os.path.basename
def main():
basedirs = sys.argv[1:]
valids = set()
invalids = set()
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
print "Skipping dotdir %r" % (directory,)
continue
for f in filenames:
if f.startswith("."):
print "Skipping dotfile %r" % (f,)
continue
fname = os.path.join(directory, f)
if fname.endswith(".verification"):
with open(fname, "rb") as fh:
for line in fh:
data = json.loads(line)
if data["valid"]:
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
needs_requeue = sorted(invalids - valids)
for item_name in needs_requeue:
print item_name
if __name__ == '__main__':
main()
## Instruction:
Print items that are bad *or* missing
## Code After:
import os
import sys
try:
import simplejson as json
except ImportError:
import json
basename = os.path.basename
def main():
greader_items = sys.argv[1]
basedirs = sys.argv[2:]
assert basedirs, "Give me some basedirs containing .verification files"
valids = set()
invalids = set()
largest = 0
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
continue
for f in filenames:
if f.startswith("."):
continue
fname = os.path.join(directory, f)
if fname.endswith(".verification"):
with open(fname, "rb") as fh:
for line in fh:
data = json.loads(line)
if data["valid"]:
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
largest = max(largest, int(data["item_name"], 10))
for n in xrange(largest):
item_name = str(n).zfill(10)
if not item_name in valids and os.path.exists(greader_items + '/' + item_name[:6] + '/' + item_name + '.gz'):
print item_name
if __name__ == '__main__':
main()
|
# ... existing code ...
basename = os.path.basename
def main():
greader_items = sys.argv[1]
basedirs = sys.argv[2:]
assert basedirs, "Give me some basedirs containing .verification files"
valids = set()
invalids = set()
largest = 0
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
continue
for f in filenames:
if f.startswith("."):
continue
fname = os.path.join(directory, f)
# ... modified code ...
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
largest = max(largest, int(data["item_name"], 10))
for n in xrange(largest):
item_name = str(n).zfill(10)
if not item_name in valids and os.path.exists(greader_items + '/' + item_name[:6] + '/' + item_name + '.gz'):
print item_name
if __name__ == '__main__':
# ... rest of the code ...
|
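A quick aside on the layout the revised requeue script above assumes: item names are ten-digit, zero-padded integers, sharded on disk by their first six digits. The sketch below only illustrates that assumed layout; the base directory and item number are invented.

import os

# Illustration of the assumed on-disk layout; the base directory and the item
# number are made up for this example.
greader_items = "/data/greader_items"              # stand-in for sys.argv[1]
item_name = str(1234567).zfill(10)                 # '0001234567'
expected = os.path.join(greader_items, item_name[:6], item_name + ".gz")
print(expected)                                    # /data/greader_items/000123/0001234567.gz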
2a6d792cf702ebe96e3741e029d523a43ce073fb
|
core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DescriptiveDocFormatter.java
|
core/processor/src/main/java/io/quarkus/annotation/processor/generate_doc/DescriptiveDocFormatter.java
|
package io.quarkus.annotation.processor.generate_doc;
import java.util.List;
class DescriptiveDocFormatter implements DocFormatter {
private static final String ENTRY_END = "\n\n";
private static final String DETAILS_TITLE = "\n== Details\n";
private static final String DEFAULTS_VALUE_FORMAT = "Defaults to: `%s` +\n";
private static final String BASIC_DESCRIPTION_FORMAT = "\n[[%s]]\n`%s`%s:: %s \n+\nType: `%s` +\n";
/**
* Generate configuration keys in descriptive format.
 * The key defines an anchor that is used to link the description with the corresponding
* key in the table of summary.
*/
@Override
public String format(List<ConfigItem> configItems) {
StringBuilder generatedAsciiDoc = new StringBuilder(DETAILS_TITLE);
for (ConfigItem configItem : configItems) {
final String basicDescription = String.format(BASIC_DESCRIPTION_FORMAT, getAnchor(configItem), configItem.getKey(),
configItem.getConfigPhase().getIllustration(), configItem.getConfigDoc(), configItem.getType());
generatedAsciiDoc.append(basicDescription);
if (!configItem.getDefaultValue().isEmpty()) {
generatedAsciiDoc.append(String.format(DEFAULTS_VALUE_FORMAT, configItem.getDefaultValue()));
}
generatedAsciiDoc.append(ENTRY_END);
}
return generatedAsciiDoc.toString();
}
}
|
package io.quarkus.annotation.processor.generate_doc;
import java.util.List;
class DescriptiveDocFormatter implements DocFormatter {
private static final String ENTRY_END = "\n\n";
private static final String DETAILS_TITLE = "\n== Details\n";
private static final String DEFAULTS_VALUE_FORMAT = "Defaults to: `%s` +\n";
private static final String BASIC_DESCRIPTION_FORMAT = "\n[[%s]]\n`%s`%s:: %s \n+\nType: `%s` +\n";
/**
* Generate configuration keys in descriptive format.
 * The key defines an anchor that is used to link the description with the corresponding
* key in the table of summary.
*/
@Override
public String format(List<ConfigItem> configItems) {
StringBuilder generatedAsciiDoc = new StringBuilder(DETAILS_TITLE);
for (ConfigItem configItem : configItems) {
final String basicDescription = String.format(BASIC_DESCRIPTION_FORMAT, getAnchor(configItem), configItem.getKey(),
configItem.getConfigPhase().getIllustration(), configItem.getConfigDoc(),
configItem.computeTypeSimpleName());
generatedAsciiDoc.append(basicDescription);
if (!configItem.getDefaultValue().isEmpty()) {
generatedAsciiDoc.append(String.format(DEFAULTS_VALUE_FORMAT, configItem.getDefaultValue()));
}
generatedAsciiDoc.append(ENTRY_END);
}
return generatedAsciiDoc.toString();
}
}
|
Include simple type in bottom description
|
Include simple type in bottom description
|
Java
|
apache-2.0
|
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
|
java
|
## Code Before:
package io.quarkus.annotation.processor.generate_doc;
import java.util.List;
class DescriptiveDocFormatter implements DocFormatter {
private static final String ENTRY_END = "\n\n";
private static final String DETAILS_TITLE = "\n== Details\n";
private static final String DEFAULTS_VALUE_FORMAT = "Defaults to: `%s` +\n";
private static final String BASIC_DESCRIPTION_FORMAT = "\n[[%s]]\n`%s`%s:: %s \n+\nType: `%s` +\n";
/**
* Generate configuration keys in descriptive format.
 * The key defines an anchor that is used to link the description with the corresponding
* key in the table of summary.
*/
@Override
public String format(List<ConfigItem> configItems) {
StringBuilder generatedAsciiDoc = new StringBuilder(DETAILS_TITLE);
for (ConfigItem configItem : configItems) {
final String basicDescription = String.format(BASIC_DESCRIPTION_FORMAT, getAnchor(configItem), configItem.getKey(),
configItem.getConfigPhase().getIllustration(), configItem.getConfigDoc(), configItem.getType());
generatedAsciiDoc.append(basicDescription);
if (!configItem.getDefaultValue().isEmpty()) {
generatedAsciiDoc.append(String.format(DEFAULTS_VALUE_FORMAT, configItem.getDefaultValue()));
}
generatedAsciiDoc.append(ENTRY_END);
}
return generatedAsciiDoc.toString();
}
}
## Instruction:
Include simple type in bottom description
## Code After:
package io.quarkus.annotation.processor.generate_doc;
import java.util.List;
class DescriptiveDocFormatter implements DocFormatter {
private static final String ENTRY_END = "\n\n";
private static final String DETAILS_TITLE = "\n== Details\n";
private static final String DEFAULTS_VALUE_FORMAT = "Defaults to: `%s` +\n";
private static final String BASIC_DESCRIPTION_FORMAT = "\n[[%s]]\n`%s`%s:: %s \n+\nType: `%s` +\n";
/**
* Generate configuration keys in descriptive format.
 * The key defines an anchor that is used to link the description with the corresponding
* key in the table of summary.
*/
@Override
public String format(List<ConfigItem> configItems) {
StringBuilder generatedAsciiDoc = new StringBuilder(DETAILS_TITLE);
for (ConfigItem configItem : configItems) {
final String basicDescription = String.format(BASIC_DESCRIPTION_FORMAT, getAnchor(configItem), configItem.getKey(),
configItem.getConfigPhase().getIllustration(), configItem.getConfigDoc(),
configItem.computeTypeSimpleName());
generatedAsciiDoc.append(basicDescription);
if (!configItem.getDefaultValue().isEmpty()) {
generatedAsciiDoc.append(String.format(DEFAULTS_VALUE_FORMAT, configItem.getDefaultValue()));
}
generatedAsciiDoc.append(ENTRY_END);
}
return generatedAsciiDoc.toString();
}
}
|
// ... existing code ...
StringBuilder generatedAsciiDoc = new StringBuilder(DETAILS_TITLE);
for (ConfigItem configItem : configItems) {
final String basicDescription = String.format(BASIC_DESCRIPTION_FORMAT, getAnchor(configItem), configItem.getKey(),
configItem.getConfigPhase().getIllustration(), configItem.getConfigDoc(),
configItem.computeTypeSimpleName());
generatedAsciiDoc.append(basicDescription);
if (!configItem.getDefaultValue().isEmpty()) {
// ... rest of the code ...
|
200027f73a99f18eeeae4395be9622c65590916f
|
fireplace/cards/gvg/neutral_epic.py
|
fireplace/cards/gvg/neutral_epic.py
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field:
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
Exclude Enhance-o Mechano from its own buff targets
|
Exclude Enhance-o Mechano from its own buff targets
|
Python
|
agpl-3.0
|
oftc-ftw/fireplace,smallnamespace/fireplace,butozerca/fireplace,jleclanche/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,liujimj/fireplace,Ragowit/fireplace,liujimj/fireplace,Meerkov/fireplace,Meerkov/fireplace,NightKev/fireplace,butozerca/fireplace,amw2104/fireplace,beheh/fireplace,smallnamespace/fireplace,amw2104/fireplace
|
python
|
## Code Before:
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field:
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
## Instruction:
Exclude Enhance-o Mechano from its own buff targets
## Code After:
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
events = [
OWN_MINION_PLAY.on(
lambda self, player, card, *args: card.atk == 1 and [Buff(card, "GVG_104a")] or []
)
]
# Piloted Sky Golem
class GVG_105:
def deathrattle(self):
return [Summon(CONTROLLER, randomCollectible(type=CardType.MINION, cost=4))]
# Junkbot
class GVG_106:
events = [
Death(FRIENDLY + MECH).on(Buff(SELF, "GVG_106e"))
]
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# Recombobulator
class GVG_108:
def action(self, target):
choice = randomCollectible(type=CardType.MINION, cost=target.cost)
return [Morph(TARGET, choice)]
# Clockwork Giant
class GVG_121:
def cost(self, value):
return value - len(self.controller.opponent.hand)
|
# ... existing code ...
# Enhance-o Mechano
class GVG_107:
def action(self):
for target in self.controller.field.exclude(self):
tag = random.choice((GameTag.WINDFURY, GameTag.TAUNT, GameTag.DIVINE_SHIELD))
yield SetTag(target, {tag: True})
# ... rest of the code ...
|
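For readers who do not know the fireplace DSL, the behavioural intent of `field.exclude(self)` in the record above can be sketched in plain Python: buff every friendly minion except the one granting the buffs. The function, minion names and tag strings below are invented for illustration and are not fireplace API.

import random

# Plain-Python sketch, not fireplace API: give each friendly minion a random
# buff, skipping the minion that grants the buffs (an identity check, which is
# what excluding `self` amounts to).
def pick_buffs(field, buffing_minion, tags=("WINDFURY", "TAUNT", "DIVINE_SHIELD")):
    return {minion: random.choice(tags) for minion in field if minion is not buffing_minion}

field = ["Enhance-o Mechano", "Wisp", "Harvest Golem"]
print(pick_buffs(field, field[0]))  # the Mechano itself receives nothing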
c81f8f92a3fe826c7bdf93e825ec60a937c56a1d
|
src/main/java/SW9/abstractions/QueryState.java
|
src/main/java/SW9/abstractions/QueryState.java
|
package SW9.abstractions;
import SW9.utility.colors.Color;
public enum QueryState {
SUCCESSFUL(Color.GREEN, Color.Intensity.I700, "✓"),
ERROR(Color.RED, Color.Intensity.I700, "✘"),
RUNNING(Color.GREY_BLUE, Color.Intensity.I600, "···"),
UNKNOWN(Color.GREY, Color.Intensity.I600, "?"),
SYNTAX_ERROR(Color.PURPLE, Color.Intensity.I700, "!");
private final Color color;
private final Color.Intensity colorIntensity;
private final String indicator;
QueryState(final Color color, final Color.Intensity colorIntensity, final String indicator) {
this.color = color;
this.colorIntensity = colorIntensity;
this.indicator = indicator;
}
public Color getColor() {
return color;
}
public Color.Intensity getColorIntensity() {
return colorIntensity;
}
public String getIndicator() {
return indicator;
}
}
|
package SW9.abstractions;
import SW9.utility.colors.Color;
public enum QueryState {
SUCCESSFUL(Color.GREEN, Color.Intensity.I700),
ERROR(Color.RED, Color.Intensity.I700),
RUNNING(Color.GREY_BLUE, Color.Intensity.I600),
UNKNOWN(Color.GREY, Color.Intensity.I600),
SYNTAX_ERROR(Color.PURPLE, Color.Intensity.I700);
private final Color color;
private final Color.Intensity colorIntensity;
QueryState(final Color color, final Color.Intensity colorIntensity) {
this.color = color;
this.colorIntensity = colorIntensity;
}
public Color getColor() {
return color;
}
public Color.Intensity getColorIntensity() {
return colorIntensity;
}
}
|
Remove unused property in query state
|
Remove unused property in query state
|
Java
|
mit
|
ulriknyman/H-Uppaal
|
java
|
## Code Before:
package SW9.abstractions;
import SW9.utility.colors.Color;
public enum QueryState {
SUCCESSFUL(Color.GREEN, Color.Intensity.I700, "✓"),
ERROR(Color.RED, Color.Intensity.I700, "✘"),
RUNNING(Color.GREY_BLUE, Color.Intensity.I600, "···"),
UNKNOWN(Color.GREY, Color.Intensity.I600, "?"),
SYNTAX_ERROR(Color.PURPLE, Color.Intensity.I700, "!");
private final Color color;
private final Color.Intensity colorIntensity;
private final String indicator;
QueryState(final Color color, final Color.Intensity colorIntensity, final String indicator) {
this.color = color;
this.colorIntensity = colorIntensity;
this.indicator = indicator;
}
public Color getColor() {
return color;
}
public Color.Intensity getColorIntensity() {
return colorIntensity;
}
public String getIndicator() {
return indicator;
}
}
## Instruction:
Remove unused property in query state
## Code After:
package SW9.abstractions;
import SW9.utility.colors.Color;
public enum QueryState {
SUCCESSFUL(Color.GREEN, Color.Intensity.I700),
ERROR(Color.RED, Color.Intensity.I700),
RUNNING(Color.GREY_BLUE, Color.Intensity.I600),
UNKNOWN(Color.GREY, Color.Intensity.I600),
SYNTAX_ERROR(Color.PURPLE, Color.Intensity.I700);
private final Color color;
private final Color.Intensity colorIntensity;
QueryState(final Color color, final Color.Intensity colorIntensity) {
this.color = color;
this.colorIntensity = colorIntensity;
}
public Color getColor() {
return color;
}
public Color.Intensity getColorIntensity() {
return colorIntensity;
}
}
|
# ... existing code ...
import SW9.utility.colors.Color;
public enum QueryState {
SUCCESSFUL(Color.GREEN, Color.Intensity.I700),
ERROR(Color.RED, Color.Intensity.I700),
RUNNING(Color.GREY_BLUE, Color.Intensity.I600),
UNKNOWN(Color.GREY, Color.Intensity.I600),
SYNTAX_ERROR(Color.PURPLE, Color.Intensity.I700);
private final Color color;
private final Color.Intensity colorIntensity;
QueryState(final Color color, final Color.Intensity colorIntensity) {
this.color = color;
this.colorIntensity = colorIntensity;
}
public Color getColor() {
# ... modified code ...
public Color.Intensity getColorIntensity() {
return colorIntensity;
}
}
# ... rest of the code ...
|
c11d0ec668a0755a9c5db2cb4dd372d8ab3e8a0d
|
.circleci/get_repos.py
|
.circleci/get_repos.py
|
from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
|
from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
|
Update with new metadata file system (removing use of ggd repo cloning)
|
Update with new metadata file system (removing use of ggd repo cloning)
|
Python
|
mit
|
gogetdata/ggd-cli,gogetdata/ggd-cli
|
python
|
## Code Before:
from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local repo
utils.update_local_repo()
##Uupdate metadata repo
utils.update_metadata_local_repo()
## Get species
utils.get_species(update_repo=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
## Instruction:
Update with new metadata file system (removing use of ggd repo cloning)
## Code After:
from __future__ import print_function
import os
import sys
import subprocess as sp
import pytest
import yaml
import requests
import argparse
import re
from ggd import utils
#---------------------------------------------------------------------
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
## get channel data
for x in channels:
utils.get_channel_data(x)
|
// ... existing code ...
## Clone repos
#---------------------------------------------------------------------
## Update local genomic metadata files
utils.update_genome_metadata_files()
##Update local channeldata.json metadata file
utils.update_channel_data_files("genomics")
## Get species
utils.get_species(update_files=True)
## get channels
channels = utils.get_ggd_channels()
// ... rest of the code ...
|
7c3cf9e430bee4451e817ccc3d32884ed0c5f8e9
|
bakeit/uploader.py
|
bakeit/uploader.py
|
try:
from urllib.request import urlopen, Request, HTTPError
except ImportError:
from urllib2 import urlopen, Request, HTTPError
import json
class PasteryUploader():
def __init__(self, api_key):
"""
Initialize an Uploader instance with the given API key.
"""
self.api_key = api_key
def upload(self, body, title="", language=None, duration=None, max_views=0):
"""
Upload the given body with the specified language type.
"""
url = "https://www.pastery.net/api/paste/?api_key=%s" % self.api_key
if title:
url += "&title=%s" % title
if language:
url += "&language=%s" % language
if duration:
url += "&duration=%s" % duration
if max_views:
url += "&max_views=%s" % max_views
body = bytes(body.encode("utf8"))
req = Request(url, data=body, headers={'User-Agent': u'Mozilla/5.0 (Python) bakeit library'})
try:
response = urlopen(req)
except HTTPError as e:
response = json.loads(e.read())
raise RuntimeError(response["error_msg"])
response = json.loads(response.read().decode("utf8"))
return response["url"]
|
try:
from urllib.request import urlopen, Request
from urllib.error import HTTPError
except ImportError:
from urllib2 import urlopen, Request, HTTPError
import json
class PasteryUploader():
def __init__(self, api_key):
"""
Initialize an Uploader instance with the given API key.
"""
self.api_key = api_key
def upload(self, body, title="", language=None, duration=None, max_views=0):
"""
Upload the given body with the specified language type.
"""
url = "https://www.pastery.net/api/paste/?api_key=%s" % self.api_key
if title:
url += "&title=%s" % title
if language:
url += "&language=%s" % language
if duration:
url += "&duration=%s" % duration
if max_views:
url += "&max_views=%s" % max_views
body = bytes(body.encode("utf8"))
req = Request(url, data=body, headers={'User-Agent': u'Mozilla/5.0 (Python) bakeit library'})
try:
response = urlopen(req)
except HTTPError as e:
response = json.loads(e.read().decode("utf8"))
raise RuntimeError(response["error_msg"])
response = json.loads(response.read().decode("utf8"))
return response["url"]
|
Fix Python3 error when decoding the response.
|
fix: Fix Python3 error when decoding the response.
|
Python
|
mit
|
skorokithakis/bakeit
|
python
|
## Code Before:
try:
from urllib.request import urlopen, Request, HTTPError
except ImportError:
from urllib2 import urlopen, Request, HTTPError
import json
class PasteryUploader():
def __init__(self, api_key):
"""
Initialize an Uploader instance with the given API key.
"""
self.api_key = api_key
def upload(self, body, title="", language=None, duration=None, max_views=0):
"""
Upload the given body with the specified language type.
"""
url = "https://www.pastery.net/api/paste/?api_key=%s" % self.api_key
if title:
url += "&title=%s" % title
if language:
url += "&language=%s" % language
if duration:
url += "&duration=%s" % duration
if max_views:
url += "&max_views=%s" % max_views
body = bytes(body.encode("utf8"))
req = Request(url, data=body, headers={'User-Agent': u'Mozilla/5.0 (Python) bakeit library'})
try:
response = urlopen(req)
except HTTPError as e:
response = json.loads(e.read())
raise RuntimeError(response["error_msg"])
response = json.loads(response.read().decode("utf8"))
return response["url"]
## Instruction:
fix: Fix Python3 error when decoding the response.
## Code After:
try:
from urllib.request import urlopen, Request
from urllib.error import HTTPError
except ImportError:
from urllib2 import urlopen, Request, HTTPError
import json
class PasteryUploader():
def __init__(self, api_key):
"""
Initialize an Uploader instance with the given API key.
"""
self.api_key = api_key
def upload(self, body, title="", language=None, duration=None, max_views=0):
"""
Upload the given body with the specified language type.
"""
url = "https://www.pastery.net/api/paste/?api_key=%s" % self.api_key
if title:
url += "&title=%s" % title
if language:
url += "&language=%s" % language
if duration:
url += "&duration=%s" % duration
if max_views:
url += "&max_views=%s" % max_views
body = bytes(body.encode("utf8"))
req = Request(url, data=body, headers={'User-Agent': u'Mozilla/5.0 (Python) bakeit library'})
try:
response = urlopen(req)
except HTTPError as e:
response = json.loads(e.read().decode("utf8"))
raise RuntimeError(response["error_msg"])
response = json.loads(response.read().decode("utf8"))
return response["url"]
|
# ... existing code ...
try:
from urllib.request import urlopen, Request
from urllib.error import HTTPError
except ImportError:
from urllib2 import urlopen, Request, HTTPError
import json
# ... modified code ...
try:
response = urlopen(req)
except HTTPError as e:
response = json.loads(e.read().decode("utf8"))
raise RuntimeError(response["error_msg"])
response = json.loads(response.read().decode("utf8"))
return response["url"]
# ... rest of the code ...
|
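The cause behind the bakeit fix above is worth making explicit: under Python 3, reading an HTTPError body returns bytes, and json.loads on older Python 3 releases did not accept bytes, so the body has to be decoded before parsing. The snippet below is a standalone illustration with a made-up payload, not bakeit code.

import json

# Standalone illustration, not bakeit code: an error body as HTTPError.read()
# would return it under Python 3, i.e. as bytes.
raw_body = b'{"error_msg": "API key not valid"}'

parsed = json.loads(raw_body.decode("utf8"))  # decode first, as the patch does
print(parsed["error_msg"])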
b912fb63cd043f653cdba9abe0b5a1c4b5ebb9c0
|
RxHyperdrive/RxHyperdrive.h
|
RxHyperdrive/RxHyperdrive.h
|
//
// RxHyperdrive.h
// RxHyperdrive
//
// Created by Kyle Fuller on 13/09/2015.
// Copyright © 2015 Cocode. All rights reserved.
//
#import <Cocoa/Cocoa.h>
//! Project version number for RxHyperdrive.
FOUNDATION_EXPORT double RxHyperdriveVersionNumber;
//! Project version string for RxHyperdrive.
FOUNDATION_EXPORT const unsigned char RxHyperdriveVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <RxHyperdrive/PublicHeader.h>
|
//
// RxHyperdrive.h
// RxHyperdrive
//
// Created by Kyle Fuller on 13/09/2015.
// Copyright © 2015 Cocode. All rights reserved.
//
@import Foundation;
//! Project version number for RxHyperdrive.
FOUNDATION_EXPORT double RxHyperdriveVersionNumber;
//! Project version string for RxHyperdrive.
FOUNDATION_EXPORT const unsigned char RxHyperdriveVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <RxHyperdrive/PublicHeader.h>
|
Use Foundation in the umbrella header
|
Use Foundation in the umbrella header
|
C
|
mit
|
kylef/RxHyperdrive,kylef/RxHyperdrive
|
c
|
## Code Before:
//
// RxHyperdrive.h
// RxHyperdrive
//
// Created by Kyle Fuller on 13/09/2015.
// Copyright © 2015 Cocode. All rights reserved.
//
#import <Cocoa/Cocoa.h>
//! Project version number for RxHyperdrive.
FOUNDATION_EXPORT double RxHyperdriveVersionNumber;
//! Project version string for RxHyperdrive.
FOUNDATION_EXPORT const unsigned char RxHyperdriveVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <RxHyperdrive/PublicHeader.h>
## Instruction:
Use Foundation in the umbrella header
## Code After:
//
// RxHyperdrive.h
// RxHyperdrive
//
// Created by Kyle Fuller on 13/09/2015.
// Copyright © 2015 Cocode. All rights reserved.
//
@import Foundation;
//! Project version number for RxHyperdrive.
FOUNDATION_EXPORT double RxHyperdriveVersionNumber;
//! Project version string for RxHyperdrive.
FOUNDATION_EXPORT const unsigned char RxHyperdriveVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <RxHyperdrive/PublicHeader.h>
|
# ... existing code ...
// Copyright © 2015 Cocode. All rights reserved.
//
@import Foundation;
//! Project version number for RxHyperdrive.
FOUNDATION_EXPORT double RxHyperdriveVersionNumber;
# ... rest of the code ...
|
667f1a5478e2196696554fc32993cd5ff2b718fa
|
You-DataStore/internal/operations/serialization_operation.h
|
You-DataStore/internal/operations/serialization_operation.h
|
namespace You {
namespace DataStore {
namespace Internal {
class SerializationOperation : public IOperation {
public:
/// Serialize task to an xml node
static void serialize(const SerializedTask&, pugi::xml_node&);
/// Deserialize task from an xml node
static SerializedTask deserialize(const pugi::xml_node&);
private:
/// The new task contents.
SerializedTask task;
};
} // namespace Internal
} // namespace DataStore
} // namespace You
#endif // YOU_DATASTORE_INTERNAL_OPERATIONS_SERIALIZATION_OPERATION_H_
|
namespace You {
namespace DataStore {
namespace Internal {
class SerializationOperation : public IOperation {
public:
/// Serialize task to an xml node
static void serialize(const SerializedTask&, pugi::xml_node&);
/// Deserialize task from an xml node
static SerializedTask deserialize(const pugi::xml_node&);
};
} // namespace Internal
} // namespace DataStore
} // namespace You
#endif // YOU_DATASTORE_INTERNAL_OPERATIONS_SERIALIZATION_OPERATION_H_
|
Remove unnecessary member variable in SerializationOperation
|
Remove unnecessary member variable in SerializationOperation
|
C
|
mit
|
cs2103aug2014-w10-1c/main,cs2103aug2014-w10-1c/main
|
c
|
## Code Before:
namespace You {
namespace DataStore {
namespace Internal {
class SerializationOperation : public IOperation {
public:
/// Serialize task to an xml node
static void serialize(const SerializedTask&, pugi::xml_node&);
/// Deserialize task from an xml node
static SerializedTask deserialize(const pugi::xml_node&);
private:
/// The new task contents.
SerializedTask task;
};
} // namespace Internal
} // namespace DataStore
} // namespace You
#endif // YOU_DATASTORE_INTERNAL_OPERATIONS_SERIALIZATION_OPERATION_H_
## Instruction:
Remove unnecessary member variable in SerializationOperation
## Code After:
namespace You {
namespace DataStore {
namespace Internal {
class SerializationOperation : public IOperation {
public:
/// Serialize task to an xml node
static void serialize(const SerializedTask&, pugi::xml_node&);
/// Deserialize task from an xml node
static SerializedTask deserialize(const pugi::xml_node&);
};
} // namespace Internal
} // namespace DataStore
} // namespace You
#endif // YOU_DATASTORE_INTERNAL_OPERATIONS_SERIALIZATION_OPERATION_H_
|
# ... existing code ...
/// Deserialize task from an xml node
static SerializedTask deserialize(const pugi::xml_node&);
};
} // namespace Internal
# ... rest of the code ...
|
1cab84d3f3726df2a7cfe4e5ad8efee81051c73e
|
tests/test_patched_stream.py
|
tests/test_patched_stream.py
|
import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
|
import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
|
Add tests for loading binaries with malformed sections
|
Add tests for loading binaries with malformed sections
|
Python
|
bsd-2-clause
|
angr/cle
|
python
|
## Code Before:
import nose
import StringIO
import cle
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
## Instruction:
Add tests for loading binaries with malformed sections
## Code After:
import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
stream1 = cle.PatchedStream(stream, [(2, 'AA')])
stream1.seek(0)
nose.tools.assert_equal(stream1.read(), '01AA456789abcdef')
stream2 = cle.PatchedStream(stream, [(2, 'AA')])
stream2.seek(0)
nose.tools.assert_equal(stream2.read(3), '01A')
stream3 = cle.PatchedStream(stream, [(2, 'AA')])
stream3.seek(3)
nose.tools.assert_equal(stream3.read(3), 'A45')
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
|
# ... existing code ...
import nose
import StringIO
import os
import cle
tests_path = os.path.join(os.path.dirname(__file__), '..', '..', 'binaries', 'tests')
def test_patched_stream():
stream = StringIO.StringIO('0123456789abcdef')
# ... modified code ...
stream4 = cle.PatchedStream(stream, [(-1, 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')])
stream4.seek(0)
nose.tools.assert_equal(stream4.read(), 'A'*0x10)
def test_malformed_sections():
ld = cle.Loader(os.path.join(tests_path, 'i386', 'oxfoo1m3'))
nose.tools.assert_equal(len(ld.main_object.segments), 1)
nose.tools.assert_equal(len(ld.main_object.sections), 0)
# ... rest of the code ...
|
f44be40fe1d9be9535a4819d7ef468cbba34ea74
|
scheduler.h
|
scheduler.h
|
/*
* scheduler.h
*
* Created on: Jun 8, 2016
* Author: riley
*/
#ifndef SCHEDULER_H
#define SCHEDULER_H
#include <stdint.h>
#include "scheduler_private.h"
typedef void (*task_func_t)( void ) ;
typedef volatile struct task_private_s {
const char * name;
volatile void * task_sp; //Task stack pointer
volatile struct task_private_s * next;
} task_t;
/// Sets up the idle task
void scheduler_init( void );
/**
* Add task to task list to be run at next context switch.
* Push task routine pointer and empty status register
* onto the new task stack so they can be popped off later
* from the task switch interrupt.
*/
void scheduler_add_task(task_t * task_handle, const char * name,
task_func_t func, uint16_t * task_stack, uint16_t stack_bytes);
/// Kicks off the timer interrupt
void scheduler_run( void );
/**
* Handy macro which blackboxes the allocation of memory
* per task. Accepts the task function to schedule
* and the size of stack to allocate as arguments.
*/
#define SCHEDULER_ADD(func, stack_size) \
CREATE_TASK_HANDLE(__LINE__, func); \
CREATE_TASK_STACK(__LINE__, func, stack_size); \
CALL_SCHEDULER_ADD(__LINE__, func);
#endif /* SCHEDULER_H */
|
/*
* scheduler.h
*
* Created on: Jun 8, 2016
* Author: riley
*/
#ifndef SCHEDULER_H
#define SCHEDULER_H
#include <stdint.h>
#include "scheduler_private.h"
typedef void (*task_func_t)( void ) ;
typedef volatile struct task_private_s {
const char * name;
volatile void * task_sp; //Task stack pointer
volatile struct task_private_s * next;
} task_t;
/// Sets up the idle task
void scheduler_init( void );
/**
* Add task to task list to be run at next context switch.
*/
void scheduler_add_task(task_t * task_handle, const char * name,
task_func_t func, uint16_t * task_stack, uint16_t stack_bytes);
/// Kicks off the timer interrupt
void scheduler_run( void );
/**
* Handy macro which blackboxes the allocation of memory
* per task. Accepts the task function to schedule
* and the size of stack to allocate as arguments.
*/
#define SCHEDULER_ADD(func, stack_size) \
CREATE_TASK_HANDLE(__LINE__, func); \
CREATE_TASK_STACK(__LINE__, func, stack_size); \
CALL_SCHEDULER_ADD(__LINE__, func);
#endif /* SCHEDULER_H */
|
Remove implementation details from API comment
|
Remove implementation details from API comment
|
C
|
mit
|
rjw245/rileyOS
|
c
|
## Code Before:
/*
* scheduler.h
*
* Created on: Jun 8, 2016
* Author: riley
*/
#ifndef SCHEDULER_H
#define SCHEDULER_H
#include <stdint.h>
#include "scheduler_private.h"
typedef void (*task_func_t)( void ) ;
typedef volatile struct task_private_s {
const char * name;
volatile void * task_sp; //Task stack pointer
volatile struct task_private_s * next;
} task_t;
/// Sets up the idle task
void scheduler_init( void );
/**
* Add task to task list to be run at next context switch.
* Push task routine pointer and empty status register
* onto the new task stack so they can be popped off later
* from the task switch interrupt.
*/
void scheduler_add_task(task_t * task_handle, const char * name,
task_func_t func, uint16_t * task_stack, uint16_t stack_bytes);
/// Kicks off the timer interrupt
void scheduler_run( void );
/**
* Handy macro which blackboxes the allocation of memory
* per task. Accepts the task function to schedule
* and the size of stack to allocate as arguments.
*/
#define SCHEDULER_ADD(func, stack_size) \
CREATE_TASK_HANDLE(__LINE__, func); \
CREATE_TASK_STACK(__LINE__, func, stack_size); \
CALL_SCHEDULER_ADD(__LINE__, func);
#endif /* SCHEDULER_H */
## Instruction:
Remove implementation details from API comment
## Code After:
/*
* scheduler.h
*
* Created on: Jun 8, 2016
* Author: riley
*/
#ifndef SCHEDULER_H
#define SCHEDULER_H
#include <stdint.h>
#include "scheduler_private.h"
typedef void (*task_func_t)( void ) ;
typedef volatile struct task_private_s {
const char * name;
volatile void * task_sp; //Task stack pointer
volatile struct task_private_s * next;
} task_t;
/// Sets up the idle task
void scheduler_init( void );
/**
* Add task to task list to be run at next context switch.
*/
void scheduler_add_task(task_t * task_handle, const char * name,
task_func_t func, uint16_t * task_stack, uint16_t stack_bytes);
/// Kicks off the timer interrupt
void scheduler_run( void );
/**
* Handy macro which blackboxes the allocation of memory
* per task. Accepts the task function to schedule
* and the size of stack to allocate as arguments.
*/
#define SCHEDULER_ADD(func, stack_size) \
CREATE_TASK_HANDLE(__LINE__, func); \
CREATE_TASK_STACK(__LINE__, func, stack_size); \
CALL_SCHEDULER_ADD(__LINE__, func);
#endif /* SCHEDULER_H */
|
...
/**
* Add task to task list to be run at next context switch.
*/
void scheduler_add_task(task_t * task_handle, const char * name,
task_func_t func, uint16_t * task_stack, uint16_t stack_bytes);
...
|
d703d7cb8d75a5c660beabccdd0082794a8471d1
|
edisgo/tools/networkx_helper.py
|
edisgo/tools/networkx_helper.py
|
from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
|
from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
|
Include the position in the graph
|
Include the position in the graph
|
Python
|
agpl-3.0
|
openego/eDisGo,openego/eDisGo
|
python
|
## Code Before:
from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
## Instruction:
Include the position in the graph
## Code After:
from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
|
// ... existing code ...
from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
// ... rest of the code ...
|
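One reason storing a 'pos' attribute on each node is useful, as the change above does, is that networkx can hand the coordinates back later, for example for plotting. The bus names and coordinates below are invented purely for illustration.

import networkx as nx

# Invented buses and coordinates, for illustration only.
graph = nx.Graph()
graph.add_nodes_from([
    ("Bus_1", {"pos": (10.0, 52.5)}),
    ("Bus_2", {"pos": (10.2, 52.6)}),
])
graph.add_edge("Bus_1", "Bus_2", branch_name="Line_12", length=1.3)

positions = nx.get_node_attributes(graph, "pos")
print(positions)                 # {'Bus_1': (10.0, 52.5), 'Bus_2': (10.2, 52.6)}
# nx.draw(graph, pos=positions)  # would place nodes at their stored coordinates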
ab0f6115c50bea63856c1e880249ad4bdca3ce42
|
src/web/urls.py
|
src/web/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
Add ansible namespace in root URLconf
|
Add ansible namespace in root URLconf
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
python
|
## Code Before:
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
## Instruction:
Add ansible namespace in root URLconf
## Code After:
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
# ... existing code ...
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
# ... rest of the code ...
|
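A practical consequence of the namespace added above is that URL names defined inside ansible.urls are reversed with the 'ansible:' prefix. The sketch below assumes a URL pattern named 'index' exists in ansible/urls.py; that name is hypothetical, and the reverse import path depends on the Django version in use.

# Sketch of how the namespace would be consumed elsewhere in such a project;
# the 'index' URL name is hypothetical.
from django.urls import reverse  # on older Django: from django.core.urlresolvers import reverse

def playbook_list_url():
    return reverse("ansible:index")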
10f19471a815908fa93912b031d449e16f5bc67f
|
test-common/src/main/java/com/gentics/mesh/test/docker/ElasticsearchContainer.java
|
test-common/src/main/java/com/gentics/mesh/test/docker/ElasticsearchContainer.java
|
package com.gentics.mesh.test.docker;
import java.nio.charset.Charset;
import java.time.Duration;
import java.util.Collections;
import org.apache.commons.io.IOUtils;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.Wait;
import org.testcontainers.images.builder.ImageFromDockerfile;
/**
* Testcontainer for a non-clustered Elasticsearch instance.
*/
public class ElasticsearchContainer extends GenericContainer<ElasticsearchContainer> {
public static final String VERSION = "6.3.1";
public ElasticsearchContainer(boolean withIngestPlugin) {
super(prepareDockerImage(withIngestPlugin));
}
private static ImageFromDockerfile prepareDockerImage(boolean withIngestPlugin) {
try {
ImageFromDockerfile dockerImage = new ImageFromDockerfile("elasticsearch", false);
String dockerFile = IOUtils.toString(ElasticsearchContainer.class.getResourceAsStream("/elasticsearch/Dockerfile.ingest"),
Charset.defaultCharset());
dockerFile = dockerFile.replace("%VERSION%", VERSION);
if (!withIngestPlugin) {
dockerFile = dockerFile.replace("RUN", "#RUN");
}
dockerImage.withFileFromString("Dockerfile", dockerFile);
return dockerImage;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
protected void configure() {
addEnv("discovery.type", "single-node");
withTmpFs(Collections.singletonMap("/usr/share/elasticsearch/data", "rw,size=64m"));
// addEnv("xpack.security.enabled", "false");
withExposedPorts(9200);
withStartupTimeout(Duration.ofSeconds(250L));
waitingFor(Wait.forHttp("/"));
}
}
|
package com.gentics.mesh.test.docker;
import java.time.Duration;
import java.util.Collections;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
/**
* Testcontainer for a non-clustered Elasticsearch instance.
*/
public class ElasticsearchContainer extends GenericContainer<ElasticsearchContainer> {
public static final String VERSION = "6.6.1";
public ElasticsearchContainer(boolean withIngestPlugin) {
super(withIngestPlugin ? "jotschi/elasticsearch-ingest:" + VERSION : "docker.elastic.co/elasticsearch/elasticsearch-oss:" + VERSION);
}
@Override
protected void configure() {
addEnv("discovery.type", "single-node");
withTmpFs(Collections.singletonMap("/usr/share/elasticsearch/data", "rw,size=64m"));
// addEnv("xpack.security.enabled", "false");
withExposedPorts(9200);
withStartupTimeout(Duration.ofSeconds(250L));
waitingFor(new HttpWaitStrategy().forPath("/"));
}
}
|
Use pre-built docker image for ES and bump ES version to 6.6.1
|
Use pre-built docker image for ES and bump ES version to 6.6.1
|
Java
|
apache-2.0
|
gentics/mesh,gentics/mesh,gentics/mesh,gentics/mesh
|
java
|
## Code Before:
package com.gentics.mesh.test.docker;
import java.nio.charset.Charset;
import java.time.Duration;
import java.util.Collections;
import org.apache.commons.io.IOUtils;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.Wait;
import org.testcontainers.images.builder.ImageFromDockerfile;
/**
* Testcontainer for a non-clustered Elasticsearch instance.
*/
public class ElasticsearchContainer extends GenericContainer<ElasticsearchContainer> {
public static final String VERSION = "6.3.1";
public ElasticsearchContainer(boolean withIngestPlugin) {
super(prepareDockerImage(withIngestPlugin));
}
private static ImageFromDockerfile prepareDockerImage(boolean withIngestPlugin) {
try {
ImageFromDockerfile dockerImage = new ImageFromDockerfile("elasticsearch", false);
String dockerFile = IOUtils.toString(ElasticsearchContainer.class.getResourceAsStream("/elasticsearch/Dockerfile.ingest"),
Charset.defaultCharset());
dockerFile = dockerFile.replace("%VERSION%", VERSION);
if (!withIngestPlugin) {
dockerFile = dockerFile.replace("RUN", "#RUN");
}
dockerImage.withFileFromString("Dockerfile", dockerFile);
return dockerImage;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
protected void configure() {
addEnv("discovery.type", "single-node");
withTmpFs(Collections.singletonMap("/usr/share/elasticsearch/data", "rw,size=64m"));
// addEnv("xpack.security.enabled", "false");
withExposedPorts(9200);
withStartupTimeout(Duration.ofSeconds(250L));
waitingFor(Wait.forHttp("/"));
}
}
## Instruction:
Use pre-built docker image for ES and bump ES version to 6.6.1
## Code After:
package com.gentics.mesh.test.docker;
import java.time.Duration;
import java.util.Collections;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
/**
* Testcontainer for a non-clustered Elasticsearch instance.
*/
public class ElasticsearchContainer extends GenericContainer<ElasticsearchContainer> {
public static final String VERSION = "6.6.1";
public ElasticsearchContainer(boolean withIngestPlugin) {
super(withIngestPlugin ? "jotschi/elasticsearch-ingest:" + VERSION : "docker.elastic.co/elasticsearch/elasticsearch-oss:" + VERSION);
}
@Override
protected void configure() {
addEnv("discovery.type", "single-node");
withTmpFs(Collections.singletonMap("/usr/share/elasticsearch/data", "rw,size=64m"));
// addEnv("xpack.security.enabled", "false");
withExposedPorts(9200);
withStartupTimeout(Duration.ofSeconds(250L));
waitingFor(new HttpWaitStrategy().forPath("/"));
}
}
|
# ... existing code ...
package com.gentics.mesh.test.docker;
import java.time.Duration;
import java.util.Collections;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
/**
* Testcontainer for a non-clustered Elasticsearch instance.
# ... modified code ...
*/
public class ElasticsearchContainer extends GenericContainer<ElasticsearchContainer> {
public static final String VERSION = "6.6.1";
public ElasticsearchContainer(boolean withIngestPlugin) {
super(withIngestPlugin ? "jotschi/elasticsearch-ingest:" + VERSION : "docker.elastic.co/elasticsearch/elasticsearch-oss:" + VERSION);
}
@Override
...
// addEnv("xpack.security.enabled", "false");
withExposedPorts(9200);
withStartupTimeout(Duration.ofSeconds(250L));
waitingFor(new HttpWaitStrategy().forPath("/"));
}
}
# ... rest of the code ...
|
4c1e1012f7f3b2e64178321f8c48ea163fe84d42
|
goci-interfaces/goci-curation/src/main/java/uk/ac/ebi/spot/goci/curation/service/CurrentUserDetailsService.java
|
goci-interfaces/goci-curation/src/main/java/uk/ac/ebi/spot/goci/curation/service/CurrentUserDetailsService.java
|
package uk.ac.ebi.spot.goci.curation.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import uk.ac.ebi.spot.goci.curation.model.CurrentUser;
import uk.ac.ebi.spot.goci.model.SecureUser;
import uk.ac.ebi.spot.goci.repository.SecureUserRepository;
/**
* Created by emma on 10/02/15.
*
* @author emma
* <p>
 * Implementation of Spring's UserDetailsService
*/
@Service
public class CurrentUserDetailsService implements UserDetailsService {
// Repository used to find users
private SecureUserRepository secureUserRepository;
@Autowired
public CurrentUserDetailsService(SecureUserRepository secureUserRepository) {
this.secureUserRepository = secureUserRepository;
}
@Override
public CurrentUser loadUserByUsername(String email) throws UsernameNotFoundException {
// Check user with email exists
if (secureUserRepository.findByEmail(email) != null) {
SecureUser secureUser = secureUserRepository.findByEmail(email);
return new CurrentUser(secureUser);
}
else {
throw new UsernameNotFoundException("User with email: " + email + " " +
"was not found");
}
}
}
|
package uk.ac.ebi.spot.goci.curation.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import uk.ac.ebi.spot.goci.curation.model.CurrentUser;
import uk.ac.ebi.spot.goci.model.SecureUser;
import uk.ac.ebi.spot.goci.repository.SecureUserRepository;
import javax.servlet.http.HttpServletRequest;
import java.security.Principal;
/**
* Created by emma on 10/02/15.
*
* @author emma
* <p>
 * Implementation of Spring's UserDetailsService
*/
@Service
public class CurrentUserDetailsService implements UserDetailsService {
// Repository used to find users
private SecureUserRepository secureUserRepository;
@Autowired
public CurrentUserDetailsService(SecureUserRepository secureUserRepository) {
this.secureUserRepository = secureUserRepository;
}
@Override
public CurrentUser loadUserByUsername(String email) throws UsernameNotFoundException {
// Check user with email exists
if (secureUserRepository.findByEmail(email) != null) {
SecureUser secureUser = secureUserRepository.findByEmail(email);
return new CurrentUser(secureUser);
}
else {
throw new UsernameNotFoundException("User with email: " + email + " " +
"was not found");
}
}
/**
 * Get the currently logged in user from the HTTP request
*
* @param request request from which to obtain current user
* @return SecureUser that represents currently logged in user
*/
public SecureUser getUserFromRequest(HttpServletRequest request) {
Principal principal = request.getUserPrincipal();
String name = principal.getName();
return secureUserRepository.findByEmail(name);
}
}
|
Add method to get currently logged in user from http request
|
Add method to get currently logged in user from http request
|
Java
|
apache-2.0
|
EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci,EBISPOT/goci
|
java
|
## Code Before:
package uk.ac.ebi.spot.goci.curation.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import uk.ac.ebi.spot.goci.curation.model.CurrentUser;
import uk.ac.ebi.spot.goci.model.SecureUser;
import uk.ac.ebi.spot.goci.repository.SecureUserRepository;
/**
* Created by emma on 10/02/15.
*
* @author emma
* <p>
 * Implementation of Spring's UserDetailsService
*/
@Service
public class CurrentUserDetailsService implements UserDetailsService {
// Repository used to find users
private SecureUserRepository secureUserRepository;
@Autowired
public CurrentUserDetailsService(SecureUserRepository secureUserRepository) {
this.secureUserRepository = secureUserRepository;
}
@Override
public CurrentUser loadUserByUsername(String email) throws UsernameNotFoundException {
// Check user with email exists
if (secureUserRepository.findByEmail(email) != null) {
SecureUser secureUser = secureUserRepository.findByEmail(email);
return new CurrentUser(secureUser);
}
else {
throw new UsernameNotFoundException("User with email: " + email + " " +
"was not found");
}
}
}
## Instruction:
Add method to get currently logged in user from http request
## Code After:
package uk.ac.ebi.spot.goci.curation.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import uk.ac.ebi.spot.goci.curation.model.CurrentUser;
import uk.ac.ebi.spot.goci.model.SecureUser;
import uk.ac.ebi.spot.goci.repository.SecureUserRepository;
import javax.servlet.http.HttpServletRequest;
import java.security.Principal;
/**
* Created by emma on 10/02/15.
*
* @author emma
* <p>
* Implementation of springs UserDetailsService
*/
@Service
public class CurrentUserDetailsService implements UserDetailsService {
// Repository used to find users
private SecureUserRepository secureUserRepository;
@Autowired
public CurrentUserDetailsService(SecureUserRepository secureUserRepository) {
this.secureUserRepository = secureUserRepository;
}
@Override
public CurrentUser loadUserByUsername(String email) throws UsernameNotFoundException {
// Check user with email exists
if (secureUserRepository.findByEmail(email) != null) {
SecureUser secureUser = secureUserRepository.findByEmail(email);
return new CurrentUser(secureUser);
}
else {
throw new UsernameNotFoundException("User with email: " + email + " " +
"was not found");
}
}
/**
* Update a study status
*
* @param request request from which to obtain current user
* @return SecureUser that represents currently logged in user
*/
public SecureUser getUserFromRequest(HttpServletRequest request) {
Principal principal = request.getUserPrincipal();
String name = principal.getName();
return secureUserRepository.findByEmail(name);
}
}
|
...
import uk.ac.ebi.spot.goci.curation.model.CurrentUser;
import uk.ac.ebi.spot.goci.model.SecureUser;
import uk.ac.ebi.spot.goci.repository.SecureUserRepository;
import javax.servlet.http.HttpServletRequest;
import java.security.Principal;
/**
* Created by emma on 10/02/15.
...
"was not found");
}
}
/**
* Update a study status
*
* @param request request from which to obtain current user
* @return SecureUser that represents currently logged in user
*/
public SecureUser getUserFromRequest(HttpServletRequest request) {
Principal principal = request.getUserPrincipal();
String name = principal.getName();
return secureUserRepository.findByEmail(name);
}
}
...
|
11b1577cd91c81469cc63767585c5862c9abf301
|
demo/app/src/main/java/com/chaquo/python/demo/MainActivity.java
|
demo/app/src/main/java/com/chaquo/python/demo/MainActivity.java
|
package com.chaquo.python.demo;
import android.os.*;
import android.support.v7.app.*;
import android.support.v7.preference.*;
import android.text.method.*;
import android.widget.*;
import com.chaquo.python.*;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (! Python.isStarted()) {
Python.start(new AndroidPlatform(this));
}
setContentView(R.layout.activity_main);
getSupportFragmentManager().beginTransaction()
.replace(R.id.flMenu, new MenuFragment())
.commit();
((TextView)findViewById(R.id.tvCaption)).setMovementMethod(LinkMovementMethod.getInstance());
}
public static class MenuFragment extends PreferenceFragmentCompat {
@Override
public void onCreatePreferences(Bundle savedInstanceState, String rootKey) {
addPreferencesFromResource(R.xml.activity_main);
}
}
}
|
package com.chaquo.python.demo;
import android.content.pm.*;
import android.os.*;
import android.support.v7.app.*;
import android.support.v7.preference.*;
import android.text.method.*;
import android.widget.*;
import com.chaquo.python.*;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
try {
String version = getPackageManager().getPackageInfo(getPackageName(), 0).versionName;
setTitle(getTitle() + " " + version);
} catch (PackageManager.NameNotFoundException ignored) {}
if (! Python.isStarted()) {
Python.start(new AndroidPlatform(this));
}
setContentView(R.layout.activity_main);
getSupportFragmentManager().beginTransaction()
.replace(R.id.flMenu, new MenuFragment())
.commit();
((TextView)findViewById(R.id.tvCaption)).setMovementMethod(LinkMovementMethod.getInstance());
}
public static class MenuFragment extends PreferenceFragmentCompat {
@Override
public void onCreatePreferences(Bundle savedInstanceState, String rootKey) {
addPreferencesFromResource(R.xml.activity_main);
}
}
}
|
Add version number to menu screen
|
Add version number to menu screen
|
Java
|
mit
|
chaquo/chaquopy,chaquo/chaquopy,chaquo/chaquopy,chaquo/chaquopy,chaquo/chaquopy
|
java
|
## Code Before:
package com.chaquo.python.demo;
import android.os.*;
import android.support.v7.app.*;
import android.support.v7.preference.*;
import android.text.method.*;
import android.widget.*;
import com.chaquo.python.*;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (! Python.isStarted()) {
Python.start(new AndroidPlatform(this));
}
setContentView(R.layout.activity_main);
getSupportFragmentManager().beginTransaction()
.replace(R.id.flMenu, new MenuFragment())
.commit();
((TextView)findViewById(R.id.tvCaption)).setMovementMethod(LinkMovementMethod.getInstance());
}
public static class MenuFragment extends PreferenceFragmentCompat {
@Override
public void onCreatePreferences(Bundle savedInstanceState, String rootKey) {
addPreferencesFromResource(R.xml.activity_main);
}
}
}
## Instruction:
Add version number to menu screen
## Code After:
package com.chaquo.python.demo;
import android.content.pm.*;
import android.os.*;
import android.support.v7.app.*;
import android.support.v7.preference.*;
import android.text.method.*;
import android.widget.*;
import com.chaquo.python.*;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
try {
String version = getPackageManager().getPackageInfo(getPackageName(), 0).versionName;
setTitle(getTitle() + " " + version);
} catch (PackageManager.NameNotFoundException ignored) {}
if (! Python.isStarted()) {
Python.start(new AndroidPlatform(this));
}
setContentView(R.layout.activity_main);
getSupportFragmentManager().beginTransaction()
.replace(R.id.flMenu, new MenuFragment())
.commit();
((TextView)findViewById(R.id.tvCaption)).setMovementMethod(LinkMovementMethod.getInstance());
}
public static class MenuFragment extends PreferenceFragmentCompat {
@Override
public void onCreatePreferences(Bundle savedInstanceState, String rootKey) {
addPreferencesFromResource(R.xml.activity_main);
}
}
}
|
# ... existing code ...
package com.chaquo.python.demo;
import android.content.pm.*;
import android.os.*;
import android.support.v7.app.*;
import android.support.v7.preference.*;
# ... modified code ...
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
try {
String version = getPackageManager().getPackageInfo(getPackageName(), 0).versionName;
setTitle(getTitle() + " " + version);
} catch (PackageManager.NameNotFoundException ignored) {}
if (! Python.isStarted()) {
Python.start(new AndroidPlatform(this));
}
# ... rest of the code ...
|
e59f187f2e4557114e534be57dc078ddf112b87c
|
completions_dev.py
|
completions_dev.py
|
import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
|
import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
|
Use tabs in new completions file snippet
|
Use tabs in new completions file snippet
Respects the user's indentation configuration.
|
Python
|
mit
|
SublimeText/PackageDev,SublimeText/AAAPackageDev,SublimeText/AAAPackageDev
|
python
|
## Code Before:
import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}"""
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
## Instruction:
Use tabs in new completions file snippet
Respects the user's indentation configuration.
## Code After:
import sublime_plugin
from sublime_lib.path import root_at_packages, get_package_name
PLUGIN_NAME = get_package_name()
COMPLETIONS_SYNTAX_DEF = "Packages/%s/Syntax Definitions/Sublime Completions.tmLanguage" % PLUGIN_NAME
TPL = """{
"scope": "source.${1:off}",
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
def run(self):
v = self.window.new_file()
v.run_command('insert_snippet', {"contents": TPL})
v.settings().set('syntax', COMPLETIONS_SYNTAX_DEF)
v.settings().set('default_dir', root_at_packages('User'))
|
# ... existing code ...
"completions": [
{ "trigger": "${2:some_trigger}", "contents": "${3:Hint: Use f, ff and fff plus Tab inside here.}" }$0
]
}""".replace(" ", "\t") # NOQA - line length
class NewCompletionsCommand(sublime_plugin.WindowCommand):
# ... rest of the code ...
|
590a1684c7c073879d74240685fe5a304afacfdd
|
setup.py
|
setup.py
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="mock-firestore",
version="0.1.2",
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
],
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="mock-firestore",
version="0.1.2",
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
],
)
|
Set description content type so it renders on PyPI
|
Set description content type so it renders on PyPI
|
Python
|
mit
|
mdowds/python-mock-firestore
|
python
|
## Code Before:
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="mock-firestore",
version="0.1.2",
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
],
)
## Instruction:
Set description content type so it renders on PyPI
## Code After:
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="mock-firestore",
version="0.1.2",
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
],
)
|
# ... existing code ...
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
# ... rest of the code ...
|
4ab29a29bc09f1bff0dda4b99a866959d6120b89
|
src/main/java/valandur/webapi/json/serializers/world/GeneratorTypeSerializer.java
|
src/main/java/valandur/webapi/json/serializers/world/GeneratorTypeSerializer.java
|
package valandur.webapi.json.serializers.world;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.spongepowered.api.world.GeneratorType;
import java.io.IOException;
public class GeneratorTypeSerializer extends StdSerializer<GeneratorType> {
public GeneratorTypeSerializer() {
this(null);
}
public GeneratorTypeSerializer(Class<GeneratorType> t) {
super(t);
}
@Override
public void serialize(GeneratorType value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
gen.writeStringField("id", value.getId());
gen.writeStringField("name", value.getName());
gen.writeObjectField("settings", value.getGeneratorSettings());
gen.writeEndObject();
}
}
|
package valandur.webapi.json.serializers.world;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.spongepowered.api.data.DataQuery;
import org.spongepowered.api.world.GeneratorType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
public class GeneratorTypeSerializer extends StdSerializer<GeneratorType> {
public GeneratorTypeSerializer() {
this(null);
}
public GeneratorTypeSerializer(Class<GeneratorType> t) {
super(t);
}
@Override
public void serialize(GeneratorType value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
gen.writeStringField("id", value.getId());
gen.writeStringField("name", value.getName());
Map<String, Object> settings = new HashMap<>();
for (DataQuery query : value.getGeneratorSettings().getKeys(true)) {
Optional val = value.getGeneratorSettings().get(query);
if (!val.isPresent())
continue;
settings.put(query.asString("."), val.get());
}
gen.writeObjectField("settings", settings);
gen.writeEndObject();
}
}
|
Fix generator type serializer not serializing properties
|
fix(json): Fix generator type serializer not serializing properties
|
Java
|
mit
|
Valandur/Web-API,Valandur/Web-API,Valandur/Web-API
|
java
|
## Code Before:
package valandur.webapi.json.serializers.world;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.spongepowered.api.world.GeneratorType;
import java.io.IOException;
public class GeneratorTypeSerializer extends StdSerializer<GeneratorType> {
public GeneratorTypeSerializer() {
this(null);
}
public GeneratorTypeSerializer(Class<GeneratorType> t) {
super(t);
}
@Override
public void serialize(GeneratorType value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
gen.writeStringField("id", value.getId());
gen.writeStringField("name", value.getName());
gen.writeObjectField("settings", value.getGeneratorSettings());
gen.writeEndObject();
}
}
## Instruction:
fix(json): Fix generator type serializer not serializing properties
## Code After:
package valandur.webapi.json.serializers.world;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.spongepowered.api.data.DataQuery;
import org.spongepowered.api.world.GeneratorType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
public class GeneratorTypeSerializer extends StdSerializer<GeneratorType> {
public GeneratorTypeSerializer() {
this(null);
}
public GeneratorTypeSerializer(Class<GeneratorType> t) {
super(t);
}
@Override
public void serialize(GeneratorType value, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
gen.writeStringField("id", value.getId());
gen.writeStringField("name", value.getName());
Map<String, Object> settings = new HashMap<>();
for (DataQuery query : value.getGeneratorSettings().getKeys(true)) {
Optional val = value.getGeneratorSettings().get(query);
if (!val.isPresent())
continue;
settings.put(query.asString("."), val.get());
}
gen.writeObjectField("settings", settings);
gen.writeEndObject();
}
}
|
# ... existing code ...
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.spongepowered.api.data.DataQuery;
import org.spongepowered.api.world.GeneratorType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
public class GeneratorTypeSerializer extends StdSerializer<GeneratorType> {
# ... modified code ...
gen.writeStartObject();
gen.writeStringField("id", value.getId());
gen.writeStringField("name", value.getName());
Map<String, Object> settings = new HashMap<>();
for (DataQuery query : value.getGeneratorSettings().getKeys(true)) {
Optional val = value.getGeneratorSettings().get(query);
if (!val.isPresent())
continue;
settings.put(query.asString("."), val.get());
}
gen.writeObjectField("settings", settings);
gen.writeEndObject();
}
}
# ... rest of the code ...
|
2f264b51b070ff047658cccf4076776328fb9fba
|
stroom-data/stroom-data-store-api/src/main/java/stroom/data/store/api/DataDownload.java
|
stroom-data/stroom-data-store-api/src/main/java/stroom/data/store/api/DataDownload.java
|
package stroom.data.store.api;
import stroom.meta.shared.FindMetaCriteria;
import stroom.util.shared.ResourcePaths;
import stroom.util.shared.RestResource;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Tag(name = "Data Download")
@Path("/dataDownload" + ResourcePaths.V1)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Consumes(MediaType.APPLICATION_JSON)
public interface DataDownload extends RestResource {
/*
This method is defined separately to `DataResource` due to GWT incompatibility with the jaxax `Response` class.
*/
@POST
@Path("downloadZip")
@Operation(
summary = "Retrieve content matching the provided criteria as a zip file",
operationId = "downloadZip",
responses = {
@ApiResponse(description = "Returns Stroom content data as a zip file")
})
@Produces(MediaType.APPLICATION_OCTET_STREAM)
Response downloadZip(@Parameter(description = "criteria", required = true) FindMetaCriteria criteria);
}
|
package stroom.data.store.api;
import stroom.meta.shared.FindMetaCriteria;
import stroom.util.shared.ResourcePaths;
import stroom.util.shared.RestResource;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Tag(name = "Data Download")
@Path("/dataDownload" + ResourcePaths.V1)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface DataDownload extends RestResource {
/*
This method is defined separately to `DataResource` due to GWT incompatibility with the jaxax `Response` class.
*/
@POST
@Path("downloadZip")
@Operation(
summary = "Retrieve content matching the provided criteria as a zip file",
operationId = "downloadZip",
responses = {
@ApiResponse(description = "Returns Stroom content data as a zip file")
})
@Produces(MediaType.APPLICATION_OCTET_STREAM)
Response downloadZip(@Parameter(description = "criteria", required = true) FindMetaCriteria criteria);
}
|
Set class producer type to application/json
|
Set class producer type to application/json
|
Java
|
apache-2.0
|
gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom
|
java
|
## Code Before:
package stroom.data.store.api;
import stroom.meta.shared.FindMetaCriteria;
import stroom.util.shared.ResourcePaths;
import stroom.util.shared.RestResource;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Tag(name = "Data Download")
@Path("/dataDownload" + ResourcePaths.V1)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Consumes(MediaType.APPLICATION_JSON)
public interface DataDownload extends RestResource {
/*
This method is defined separately to `DataResource` due to GWT incompatibility with the jaxax `Response` class.
*/
@POST
@Path("downloadZip")
@Operation(
summary = "Retrieve content matching the provided criteria as a zip file",
operationId = "downloadZip",
responses = {
@ApiResponse(description = "Returns Stroom content data as a zip file")
})
@Produces(MediaType.APPLICATION_OCTET_STREAM)
Response downloadZip(@Parameter(description = "criteria", required = true) FindMetaCriteria criteria);
}
## Instruction:
Set class producer type to application/json
## Code After:
package stroom.data.store.api;
import stroom.meta.shared.FindMetaCriteria;
import stroom.util.shared.ResourcePaths;
import stroom.util.shared.RestResource;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Tag(name = "Data Download")
@Path("/dataDownload" + ResourcePaths.V1)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface DataDownload extends RestResource {
/*
This method is defined separately to `DataResource` due to GWT incompatibility with the jaxax `Response` class.
*/
@POST
@Path("downloadZip")
@Operation(
summary = "Retrieve content matching the provided criteria as a zip file",
operationId = "downloadZip",
responses = {
@ApiResponse(description = "Returns Stroom content data as a zip file")
})
@Produces(MediaType.APPLICATION_OCTET_STREAM)
Response downloadZip(@Parameter(description = "criteria", required = true) FindMetaCriteria criteria);
}
|
// ... existing code ...
@Tag(name = "Data Download")
@Path("/dataDownload" + ResourcePaths.V1)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface DataDownload extends RestResource {
// ... rest of the code ...
|
99f53e007aac85aba162136dfa8ce131c965308b
|
pale/__init__.py
|
pale/__init__.py
|
import inspect
import types
import adapters
import arguments
import config
import context
from endpoint import Endpoint
from resource import NoContentResource, Resource, ResourceList
ImplementationModule = "_pale__api_implementation"
def is_pale_module(obj):
is_it = isinstance(obj, types.ModuleType) and \
hasattr(obj, '_module_type') and \
obj._module_type == ImplementationModule
return is_it
def extract_endpoints(api_module):
"""Iterates through an api implementation module to extract and instantiate
endpoint objects to be passed to the HTTP-layer's router.
"""
if not hasattr(api_module, 'endpoints'):
raise ValueError(("pale.extract_endpoints expected the passed in "
"api_module to have an `endpoints` attribute, but it didn't!"))
classes = [v for (k,v) in inspect.getmembers(api_module.endpoints,
inspect.isclass)]
instances = []
for cls in classes:
if Endpoint in cls.__bases__:
instances.append(cls())
return instances
|
import inspect
import types
from . import adapters
from . import arguments
from . import config
from . import context
from .endpoint import Endpoint
from .resource import NoContentResource, Resource, ResourceList
ImplementationModule = "_pale__api_implementation"
def is_pale_module(obj):
is_it = isinstance(obj, types.ModuleType) and \
hasattr(obj, '_module_type') and \
obj._module_type == ImplementationModule
return is_it
def extract_endpoints(api_module):
"""Iterates through an api implementation module to extract and instantiate
endpoint objects to be passed to the HTTP-layer's router.
"""
if not hasattr(api_module, 'endpoints'):
raise ValueError(("pale.extract_endpoints expected the passed in "
"api_module to have an `endpoints` attribute, but it didn't!"))
classes = [v for (k,v) in inspect.getmembers(api_module.endpoints,
inspect.isclass)]
instances = []
for cls in classes:
if Endpoint in cls.__bases__:
instances.append(cls())
return instances
|
Add dots to pale things
|
Add dots to pale things
|
Python
|
mit
|
Loudr/pale
|
python
|
## Code Before:
import inspect
import types
import adapters
import arguments
import config
import context
from endpoint import Endpoint
from resource import NoContentResource, Resource, ResourceList
ImplementationModule = "_pale__api_implementation"
def is_pale_module(obj):
is_it = isinstance(obj, types.ModuleType) and \
hasattr(obj, '_module_type') and \
obj._module_type == ImplementationModule
return is_it
def extract_endpoints(api_module):
"""Iterates through an api implementation module to extract and instantiate
endpoint objects to be passed to the HTTP-layer's router.
"""
if not hasattr(api_module, 'endpoints'):
raise ValueError(("pale.extract_endpoints expected the passed in "
"api_module to have an `endpoints` attribute, but it didn't!"))
classes = [v for (k,v) in inspect.getmembers(api_module.endpoints,
inspect.isclass)]
instances = []
for cls in classes:
if Endpoint in cls.__bases__:
instances.append(cls())
return instances
## Instruction:
Add dots to pale things
## Code After:
import inspect
import types
from . import adapters
from . import arguments
from . import config
from . import context
from .endpoint import Endpoint
from .resource import NoContentResource, Resource, ResourceList
ImplementationModule = "_pale__api_implementation"
def is_pale_module(obj):
is_it = isinstance(obj, types.ModuleType) and \
hasattr(obj, '_module_type') and \
obj._module_type == ImplementationModule
return is_it
def extract_endpoints(api_module):
"""Iterates through an api implementation module to extract and instantiate
endpoint objects to be passed to the HTTP-layer's router.
"""
if not hasattr(api_module, 'endpoints'):
raise ValueError(("pale.extract_endpoints expected the passed in "
"api_module to have an `endpoints` attribute, but it didn't!"))
classes = [v for (k,v) in inspect.getmembers(api_module.endpoints,
inspect.isclass)]
instances = []
for cls in classes:
if Endpoint in cls.__bases__:
instances.append(cls())
return instances
|
# ... existing code ...
import inspect
import types
from . import adapters
from . import arguments
from . import config
from . import context
from .endpoint import Endpoint
from .resource import NoContentResource, Resource, ResourceList
ImplementationModule = "_pale__api_implementation"
# ... rest of the code ...
|
12afe43b0f2599b0c79fab8bb0af454ccf16e57f
|
gittip/orm/__init__.py
|
gittip/orm/__init__.py
|
from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()
|
from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
Remove the convenience functions, reorganize around the SQLAlchemy class
|
Remove the convenience functions, reorganize around the SQLAlchemy class
|
Python
|
cc0-1.0
|
bountysource/www.gittip.com,eXcomm/gratipay.com,bountysource/www.gittip.com,studio666/gratipay.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,bountysource/www.gittip.com,gratipay/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,eXcomm/gratipay.com,bountysource/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,gratipay/gratipay.com,MikeFair/www.gittip.com,studio666/gratipay.com
|
python
|
## Code Before:
from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()
## Instruction:
Remove the convenience functions, reorganize around the SQLAlchemy class
## Code After:
from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
// ... existing code ...
from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
// ... modified code ...
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
// ... rest of the code ...
|
1cda977eff5a2edaa0de82882ef2e7d1611329b7
|
tests/test_protocol.py
|
tests/test_protocol.py
|
import pytest
class TestProtocol:
@pytest.mark.asyncio
def test_server_hello(self, ws_client_factory, get_unencrypted_packet):
"""
The server must send a valid `server-hello` on connection.
"""
client = yield from ws_client_factory()
receiver, message = yield from get_unencrypted_packet(client)
assert receiver == 0x00
assert message['type'] == 'server-hello'
assert len(message['key']) == 32
assert len(message['my-cookie']) == 16
yield from client.close()
|
import asyncio
import pytest
import saltyrtc
class TestProtocol:
@pytest.mark.asyncio
def test_no_subprotocols(self, ws_client_factory):
"""
The server must drop the client after the connection has been
established with a close code of *1002*.
"""
client = yield from ws_client_factory(subprotocols=None)
yield from asyncio.sleep(0.05)
assert not client.open
assert client.close_code == saltyrtc.CloseCode.sub_protocol_error
@pytest.mark.asyncio
def test_invalid_subprotocols(self, ws_client_factory):
"""
The server must drop the client after the connection has been
established with a close code of *1002*.
"""
client = yield from ws_client_factory(subprotocols=['kittie-protocol-3000'])
yield from asyncio.sleep(0.05)
assert not client.open
assert client.close_code == saltyrtc.CloseCode.sub_protocol_error
@pytest.mark.asyncio
def test_server_hello(self, ws_client_factory, get_unencrypted_packet):
"""
The server must send a valid `server-hello` on connection.
"""
client = yield from ws_client_factory()
receiver, message = yield from get_unencrypted_packet(client)
assert receiver == 0x00
assert message['type'] == 'server-hello'
assert len(message['key']) == 32
assert len(message['my-cookie']) == 16
yield from client.close()
|
Add tests for invalid and no provided sub-protocols
|
Add tests for invalid and no provided sub-protocols
|
Python
|
mit
|
saltyrtc/saltyrtc-server-python,saltyrtc/saltyrtc-server-python
|
python
|
## Code Before:
import pytest
class TestProtocol:
@pytest.mark.asyncio
def test_server_hello(self, ws_client_factory, get_unencrypted_packet):
"""
The server must send a valid `server-hello` on connection.
"""
client = yield from ws_client_factory()
receiver, message = yield from get_unencrypted_packet(client)
assert receiver == 0x00
assert message['type'] == 'server-hello'
assert len(message['key']) == 32
assert len(message['my-cookie']) == 16
yield from client.close()
## Instruction:
Add tests for invalid and no provided sub-protocols
## Code After:
import asyncio
import pytest
import saltyrtc
class TestProtocol:
@pytest.mark.asyncio
def test_no_subprotocols(self, ws_client_factory):
"""
The server must drop the client after the connection has been
established with a close code of *1002*.
"""
client = yield from ws_client_factory(subprotocols=None)
yield from asyncio.sleep(0.05)
assert not client.open
assert client.close_code == saltyrtc.CloseCode.sub_protocol_error
@pytest.mark.asyncio
def test_invalid_subprotocols(self, ws_client_factory):
"""
The server must drop the client after the connection has been
established with a close code of *1002*.
"""
client = yield from ws_client_factory(subprotocols=['kittie-protocol-3000'])
yield from asyncio.sleep(0.05)
assert not client.open
assert client.close_code == saltyrtc.CloseCode.sub_protocol_error
@pytest.mark.asyncio
def test_server_hello(self, ws_client_factory, get_unencrypted_packet):
"""
The server must send a valid `server-hello` on connection.
"""
client = yield from ws_client_factory()
receiver, message = yield from get_unencrypted_packet(client)
assert receiver == 0x00
assert message['type'] == 'server-hello'
assert len(message['key']) == 32
assert len(message['my-cookie']) == 16
yield from client.close()
|
// ... existing code ...
import asyncio
import pytest
import saltyrtc
class TestProtocol:
@pytest.mark.asyncio
def test_no_subprotocols(self, ws_client_factory):
"""
The server must drop the client after the connection has been
established with a close code of *1002*.
"""
client = yield from ws_client_factory(subprotocols=None)
yield from asyncio.sleep(0.05)
assert not client.open
assert client.close_code == saltyrtc.CloseCode.sub_protocol_error
@pytest.mark.asyncio
def test_invalid_subprotocols(self, ws_client_factory):
"""
The server must drop the client after the connection has been
established with a close code of *1002*.
"""
client = yield from ws_client_factory(subprotocols=['kittie-protocol-3000'])
yield from asyncio.sleep(0.05)
assert not client.open
assert client.close_code == saltyrtc.CloseCode.sub_protocol_error
@pytest.mark.asyncio
def test_server_hello(self, ws_client_factory, get_unencrypted_packet):
"""
// ... rest of the code ...
|
ce7c31f3dd97716051b72951c7c745dd2c63efcd
|
plugins/audit_logs/server/__init__.py
|
plugins/audit_logs/server/__init__.py
|
import cherrypy
import logging
from girder import auditLogger
from girder.models.model_base import Model
from girder.api.rest import getCurrentUser
class Record(Model):
def initialize(self):
self.name = 'audit_log_record'
def validate(self, doc):
return doc
class AuditLogHandler(logging.Handler):
def handle(self, record):
user = getCurrentUser()
Record().save({
'type': record.msg,
'details': record.details,
'ip': cherrypy.request.remote.ip,
'userId': user and user['_id']
})
def load(info):
auditLogger.addHandler(AuditLogHandler())
|
import cherrypy
import datetime
import logging
from girder import auditLogger
from girder.models.model_base import Model
from girder.api.rest import getCurrentUser
class Record(Model):
def initialize(self):
self.name = 'audit_log_record'
def validate(self, doc):
return doc
class AuditLogHandler(logging.Handler):
def handle(self, record):
user = getCurrentUser()
Record().save({
'type': record.msg,
'details': record.details,
'ip': cherrypy.request.remote.ip,
'userId': user and user['_id'],
'when': datetime.datetime.utcnow()
})
def load(info):
auditLogger.addHandler(AuditLogHandler())
|
Include timestamp in audit logs
|
Include timestamp in audit logs
|
Python
|
apache-2.0
|
RafaelPalomar/girder,data-exp-lab/girder,RafaelPalomar/girder,data-exp-lab/girder,girder/girder,kotfic/girder,RafaelPalomar/girder,Kitware/girder,manthey/girder,jbeezley/girder,manthey/girder,girder/girder,data-exp-lab/girder,kotfic/girder,RafaelPalomar/girder,Kitware/girder,Kitware/girder,data-exp-lab/girder,kotfic/girder,girder/girder,manthey/girder,jbeezley/girder,data-exp-lab/girder,manthey/girder,girder/girder,Kitware/girder,RafaelPalomar/girder,kotfic/girder,kotfic/girder,jbeezley/girder,jbeezley/girder
|
python
|
## Code Before:
import cherrypy
import logging
from girder import auditLogger
from girder.models.model_base import Model
from girder.api.rest import getCurrentUser
class Record(Model):
def initialize(self):
self.name = 'audit_log_record'
def validate(self, doc):
return doc
class AuditLogHandler(logging.Handler):
def handle(self, record):
user = getCurrentUser()
Record().save({
'type': record.msg,
'details': record.details,
'ip': cherrypy.request.remote.ip,
'userId': user and user['_id']
})
def load(info):
auditLogger.addHandler(AuditLogHandler())
## Instruction:
Include timestamp in audit logs
## Code After:
import cherrypy
import datetime
import logging
from girder import auditLogger
from girder.models.model_base import Model
from girder.api.rest import getCurrentUser
class Record(Model):
def initialize(self):
self.name = 'audit_log_record'
def validate(self, doc):
return doc
class AuditLogHandler(logging.Handler):
def handle(self, record):
user = getCurrentUser()
Record().save({
'type': record.msg,
'details': record.details,
'ip': cherrypy.request.remote.ip,
'userId': user and user['_id'],
'when': datetime.datetime.utcnow()
})
def load(info):
auditLogger.addHandler(AuditLogHandler())
|
# ... existing code ...
import cherrypy
import datetime
import logging
from girder import auditLogger
from girder.models.model_base import Model
# ... modified code ...
'type': record.msg,
'details': record.details,
'ip': cherrypy.request.remote.ip,
'userId': user and user['_id'],
'when': datetime.datetime.utcnow()
})
# ... rest of the code ...
|
2e95901ee37100f855a5f30e6143920ef2b56904
|
odinweb/_compat.py
|
odinweb/_compat.py
|
from __future__ import unicode_literals
import sys
__all__ = (
'PY2', 'PY3',
'string_types', 'integer_types', 'text_type', 'binary_type',
'range', 'with_metaclass'
)
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
text_type = str
binary_type = bytes
else:
string_types = basestring,
integer_types = (int, long)
text_type = unicode
binary_type = str
if PY2:
range = xrange
else:
range = range
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
|
import sys
__all__ = (
'PY2', 'PY3',
'string_types', 'integer_types', 'text_type', 'binary_type',
'range', 'with_metaclass'
)
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
text_type = str
binary_type = bytes
else:
string_types = basestring,
integer_types = (int, long)
text_type = unicode
binary_type = str
if PY2:
range = xrange
else:
range = range
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
|
Remove unicode literals to fix with_metaclass method
|
Remove unicode literals to fix with_metaclass method
|
Python
|
bsd-3-clause
|
python-odin/odinweb,python-odin/odinweb
|
python
|
## Code Before:
from __future__ import unicode_literals
import sys
__all__ = (
'PY2', 'PY3',
'string_types', 'integer_types', 'text_type', 'binary_type',
'range', 'with_metaclass'
)
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
text_type = str
binary_type = bytes
else:
string_types = basestring,
integer_types = (int, long)
text_type = unicode
binary_type = str
if PY2:
range = xrange
else:
range = range
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
## Instruction:
Remove unicode literals to fix with_metaclass method
## Code After:
import sys
__all__ = (
'PY2', 'PY3',
'string_types', 'integer_types', 'text_type', 'binary_type',
'range', 'with_metaclass'
)
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
text_type = str
binary_type = bytes
else:
string_types = basestring,
integer_types = (int, long)
text_type = unicode
binary_type = str
if PY2:
range = xrange
else:
range = range
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
|
...
import sys
__all__ = (
...
|
eb96745cf26dda4e032e81abddec336d9487914c
|
command-side/src/main/java/br/holandajunior/workday/repositories/IUserRepository.java
|
command-side/src/main/java/br/holandajunior/workday/repositories/IUserRepository.java
|
package br.holandajunior.workday.repositories;
import br.holandajunior.workday.models.User;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by holandajunior on 29/04/17.
*/
public interface IUserRepository extends JpaRepository< User, Long > {}
|
package br.holandajunior.workday.repositories;
import br.holandajunior.workday.models.repository.User;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by holandajunior on 29/04/17.
*/
public interface IUserRepository extends JpaRepository< User, Long > {
User findByUsername( String username );
}
|
Add into repository to find user by username
|
Add into repository to find user by username
|
Java
|
mit
|
holandajunior/workaday,holandajunior/workaday
|
java
|
## Code Before:
package br.holandajunior.workday.repositories;
import br.holandajunior.workday.models.User;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by holandajunior on 29/04/17.
*/
public interface IUserRepository extends JpaRepository< User, Long > {}
## Instruction:
Add into repository to find user by username
## Code After:
package br.holandajunior.workday.repositories;
import br.holandajunior.workday.models.repository.User;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by holandajunior on 29/04/17.
*/
public interface IUserRepository extends JpaRepository< User, Long > {
User findByUsername( String username );
}
|
...
package br.holandajunior.workday.repositories;
import br.holandajunior.workday.models.repository.User;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by holandajunior on 29/04/17.
*/
public interface IUserRepository extends JpaRepository< User, Long > {
User findByUsername( String username );
}
...
|
889da6f3b28f7590a4520c5cde310ef544b7b74a
|
worker-svc/src/main/java/com/palominolabs/benchpress/worker/WorkerMainModule.java
|
worker-svc/src/main/java/com/palominolabs/benchpress/worker/WorkerMainModule.java
|
package com.palominolabs.benchpress.worker;
import com.google.inject.AbstractModule;
import com.palominolabs.benchpress.ipc.IpcHttpClientModule;
import com.palominolabs.benchpress.ipc.IpcJsonModule;
import com.palominolabs.benchpress.job.registry.JobRegistryModule;
import com.palominolabs.benchpress.job.task.TaskPluginRegistryModule;
import com.palominolabs.benchpress.task.reporting.TaskProgressClientModule;
import com.palominolabs.benchpress.worker.http.WorkerResourceModule;
import com.palominolabs.benchpress.curator.CuratorModule;
import com.palominolabs.config.ConfigModule;
import com.palominolabs.config.ConfigModuleBuilder;
import org.apache.commons.configuration.SystemConfiguration;
public final class WorkerMainModule extends AbstractModule {
@Override
protected void configure() {
binder().requireExplicitBindings();
bind(WorkerMain.class);
install(new WorkerResourceModule());
install(new ConfigModuleBuilder().addConfiguration(new SystemConfiguration()).build());
install(new CuratorModule());
install(new IpcHttpClientModule());
install(new IpcJsonModule());
install(new TaskProgressClientModule());
install(new JobRegistryModule());
install(new QueueProviderModule());
bind(PartitionRunner.class);
ConfigModule.bindConfigBean(binder(), WorkerConfig.class);
install(new TaskPluginRegistryModule());
}
}
|
package com.palominolabs.benchpress.worker;
import com.google.inject.AbstractModule;
import com.palominolabs.benchpress.ipc.IpcHttpClientModule;
import com.palominolabs.benchpress.ipc.IpcJsonModule;
import com.palominolabs.benchpress.jersey.JerseySupportModule;
import com.palominolabs.benchpress.job.registry.JobRegistryModule;
import com.palominolabs.benchpress.job.task.TaskPluginRegistryModule;
import com.palominolabs.benchpress.task.reporting.TaskProgressClientModule;
import com.palominolabs.benchpress.worker.http.WorkerResourceModule;
import com.palominolabs.benchpress.curator.CuratorModule;
import com.palominolabs.config.ConfigModule;
import com.palominolabs.config.ConfigModuleBuilder;
import org.apache.commons.configuration.SystemConfiguration;
public final class WorkerMainModule extends AbstractModule {
@Override
protected void configure() {
binder().requireExplicitBindings();
bind(WorkerMain.class);
install(new WorkerResourceModule());
install(new ConfigModuleBuilder().addConfiguration(new SystemConfiguration()).build());
install(new CuratorModule());
install(new JerseySupportModule());
install(new IpcHttpClientModule());
install(new IpcJsonModule());
install(new TaskProgressClientModule());
install(new JobRegistryModule());
install(new QueueProviderModule());
bind(PartitionRunner.class);
ConfigModule.bindConfigBean(binder(), WorkerConfig.class);
install(new TaskPluginRegistryModule());
}
}
|
Fix worker service: needs jersey binding
|
Fix worker service: needs jersey binding
|
Java
|
apache-2.0
|
palominolabs/benchpress
|
java
|
## Code Before:
package com.palominolabs.benchpress.worker;
import com.google.inject.AbstractModule;
import com.palominolabs.benchpress.ipc.IpcHttpClientModule;
import com.palominolabs.benchpress.ipc.IpcJsonModule;
import com.palominolabs.benchpress.job.registry.JobRegistryModule;
import com.palominolabs.benchpress.job.task.TaskPluginRegistryModule;
import com.palominolabs.benchpress.task.reporting.TaskProgressClientModule;
import com.palominolabs.benchpress.worker.http.WorkerResourceModule;
import com.palominolabs.benchpress.curator.CuratorModule;
import com.palominolabs.config.ConfigModule;
import com.palominolabs.config.ConfigModuleBuilder;
import org.apache.commons.configuration.SystemConfiguration;
public final class WorkerMainModule extends AbstractModule {
@Override
protected void configure() {
binder().requireExplicitBindings();
bind(WorkerMain.class);
install(new WorkerResourceModule());
install(new ConfigModuleBuilder().addConfiguration(new SystemConfiguration()).build());
install(new CuratorModule());
install(new IpcHttpClientModule());
install(new IpcJsonModule());
install(new TaskProgressClientModule());
install(new JobRegistryModule());
install(new QueueProviderModule());
bind(PartitionRunner.class);
ConfigModule.bindConfigBean(binder(), WorkerConfig.class);
install(new TaskPluginRegistryModule());
}
}
## Instruction:
Fix worker service: needs jersey binding
## Code After:
package com.palominolabs.benchpress.worker;
import com.google.inject.AbstractModule;
import com.palominolabs.benchpress.ipc.IpcHttpClientModule;
import com.palominolabs.benchpress.ipc.IpcJsonModule;
import com.palominolabs.benchpress.jersey.JerseySupportModule;
import com.palominolabs.benchpress.job.registry.JobRegistryModule;
import com.palominolabs.benchpress.job.task.TaskPluginRegistryModule;
import com.palominolabs.benchpress.task.reporting.TaskProgressClientModule;
import com.palominolabs.benchpress.worker.http.WorkerResourceModule;
import com.palominolabs.benchpress.curator.CuratorModule;
import com.palominolabs.config.ConfigModule;
import com.palominolabs.config.ConfigModuleBuilder;
import org.apache.commons.configuration.SystemConfiguration;
public final class WorkerMainModule extends AbstractModule {
@Override
protected void configure() {
binder().requireExplicitBindings();
bind(WorkerMain.class);
install(new WorkerResourceModule());
install(new ConfigModuleBuilder().addConfiguration(new SystemConfiguration()).build());
install(new CuratorModule());
install(new JerseySupportModule());
install(new IpcHttpClientModule());
install(new IpcJsonModule());
install(new TaskProgressClientModule());
install(new JobRegistryModule());
install(new QueueProviderModule());
bind(PartitionRunner.class);
ConfigModule.bindConfigBean(binder(), WorkerConfig.class);
install(new TaskPluginRegistryModule());
}
}
|
...
import com.google.inject.AbstractModule;
import com.palominolabs.benchpress.ipc.IpcHttpClientModule;
import com.palominolabs.benchpress.ipc.IpcJsonModule;
import com.palominolabs.benchpress.jersey.JerseySupportModule;
import com.palominolabs.benchpress.job.registry.JobRegistryModule;
import com.palominolabs.benchpress.job.task.TaskPluginRegistryModule;
import com.palominolabs.benchpress.task.reporting.TaskProgressClientModule;
...
install(new CuratorModule());
install(new JerseySupportModule());
install(new IpcHttpClientModule());
install(new IpcJsonModule());
install(new TaskProgressClientModule());
...
|
c7d2e917df5e0c2182e351b5157271b6e62a06cd
|
app/soc/modules/gsoc/models/timeline.py
|
app/soc/modules/gsoc/models/timeline.py
|
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Application Review Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Student Application Matched Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Organizations Review Student Applications Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Students Matched to Mentors Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
Change verbage on program profile info.
|
Change verbage on program profile info.
Fixes issue 1601.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
python
|
## Code Before:
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Application Review Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Student Application Matched Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
## Instruction:
Change verbage on program profile info.
Fixes issue 1601.
## Code After:
from google.appengine.ext import db
from django.utils.translation import ugettext
import soc.models.timeline
class GSoCTimeline(soc.models.timeline.Timeline):
"""GSoC Timeline model extends the basic Program Timeline model.
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Organizations Review Student Applications Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Students Matched to Mentors Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
|
# ... existing code ...
"""
application_review_deadline = db.DateTimeProperty(
verbose_name=ugettext('Organizations Review Student Applications Deadline'))
student_application_matched_deadline = db.DateTimeProperty(
verbose_name=ugettext('Students Matched to Mentors Deadline'))
accepted_students_announced_deadline = db.DateTimeProperty(
verbose_name=ugettext('Accepted Students Announced Deadline'))
# ... rest of the code ...
|