commit (stringlengths 40–40) | old_file (stringlengths 4–234) | new_file (stringlengths 4–234) | old_contents (stringlengths 10–3.01k) | new_contents (stringlengths 19–3.38k) | subject (stringlengths 16–736) | message (stringlengths 17–2.63k) | lang (stringclasses 4 values) | license (stringclasses 13 values) | repos (stringlengths 5–82.6k) | config (stringclasses 4 values) | content (stringlengths 134–4.41k) | fuzzy_diff (stringlengths 29–3.44k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
1c4309b1dd21d28cbe25266c1eca023ced12a892
|
liquibase-core/src/main/java/liquibase/datatype/core/TinyIntType.java
|
liquibase-core/src/main/java/liquibase/datatype/core/TinyIntType.java
|
package liquibase.datatype.core;
import liquibase.database.Database;
import liquibase.database.core.DerbyDatabase;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.database.core.PostgresDatabase;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
@DataTypeInfo(name="tinyint", aliases = "java.sql.Types.TINYINT", minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class TinyIntType extends LiquibaseDataType {
private boolean autoIncrement;
public boolean isAutoIncrement() {
return autoIncrement;
}
public void setAutoIncrement(boolean autoIncrement) {
this.autoIncrement = autoIncrement;
}
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
if (database instanceof DerbyDatabase || database instanceof PostgresDatabase) {
return new DatabaseDataType("SMALLINT");
}
if (database instanceof MSSQLDatabase) {
return new DatabaseDataType("TINYINT");
}
if (database instanceof OracleDatabase) {
return new DatabaseDataType("NUMBER",3);
}
return super.toDatabaseDataType(database);
}
}
|
package liquibase.datatype.core;
import liquibase.database.Database;
import liquibase.database.core.DerbyDatabase;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.database.core.PostgresDatabase;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
@DataTypeInfo(name="tinyint", aliases = "java.sql.Types.TINYINT", minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class TinyIntType extends LiquibaseDataType {
private boolean autoIncrement;
public boolean isAutoIncrement() {
return autoIncrement;
}
public void setAutoIncrement(boolean autoIncrement) {
this.autoIncrement = autoIncrement;
}
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
if (database instanceof DerbyDatabase || database instanceof PostgresDatabase || database instanceof FirebirdDatabase) {
return new DatabaseDataType("SMALLINT");
}
if (database instanceof MSSQLDatabase) {
return new DatabaseDataType("TINYINT");
}
if (database instanceof OracleDatabase) {
return new DatabaseDataType("NUMBER",3);
}
return super.toDatabaseDataType(database);
}
}
|
Use smallint for Firebird if tinyint is requested
|
Use smallint for Firebird if tinyint is requested
|
Java
|
apache-2.0
|
dyk/liquibase,mattbertolini/liquibase,liquibase/liquibase,vast-engineering/liquibase,russ-p/liquibase,foxel/liquibase,foxel/liquibase,gquintana/liquibase,talklittle/liquibase,maberle/liquibase,syncron/liquibase,vfpfafrf/liquibase,tjardo83/liquibase,pellcorp/liquibase,fbiville/liquibase,EVODelavega/liquibase,adriens/liquibase,talklittle/liquibase,danielkec/liquibase,CoderPaulK/liquibase,hbogaards/liquibase,dprguard2000/liquibase,NSIT/liquibase,adriens/liquibase,dbmanul/dbmanul,vbekiaris/liquibase,vast-engineering/liquibase,cleiter/liquibase,Willem1987/liquibase,C0mmi3/liquibase,mbreslow/liquibase,evigeant/liquibase,fossamagna/liquibase,cbotiza/liquibase,mwaylabs/liquibase,Willem1987/liquibase,mattbertolini/liquibase,mbreslow/liquibase,EVODelavega/liquibase,dprguard2000/liquibase,fbiville/liquibase,danielkec/liquibase,NSIT/liquibase,cbotiza/liquibase,dprguard2000/liquibase,fbiville/liquibase,Datical/liquibase,liquibase/liquibase,instantdelay/liquibase,instantdelay/liquibase,foxel/liquibase,mbreslow/liquibase,russ-p/liquibase,balazs-zsoldos/liquibase,FreshGrade/liquibase,vfpfafrf/liquibase,jimmycd/liquibase,cleiter/liquibase,danielkec/liquibase,fossamagna/liquibase,maberle/liquibase,EVODelavega/liquibase,gquintana/liquibase,hbogaards/liquibase,ArloL/liquibase,balazs-zsoldos/liquibase,syncron/liquibase,dyk/liquibase,CoderPaulK/liquibase,vfpfafrf/liquibase,mattbertolini/liquibase,tjardo83/liquibase,iherasymenko/liquibase,dbmanul/dbmanul,iherasymenko/liquibase,hbogaards/liquibase,evigeant/liquibase,FreshGrade/liquibase,FreshGrade/liquibase,jimmycd/liquibase,CoderPaulK/liquibase,cleiter/liquibase,danielkec/liquibase,OpenCST/liquibase,dprguard2000/liquibase,mwaylabs/liquibase,gquintana/liquibase,syncron/liquibase,vast-engineering/liquibase,pellcorp/liquibase,CoderPaulK/liquibase,vbekiaris/liquibase,C0mmi3/liquibase,klopfdreh/liquibase,hbogaards/liquibase,talklittle/liquibase,vbekiaris/liquibase,dbmanul/dbmanul,instantdelay/liquibase,instantdelay/liquibase,maberle/liquibase,lazaronixon/liquibase,mwaylabs/liquibase,maberle/liquibase,mortegac/liquibase,russ-p/liquibase,EVODelavega/liquibase,syncron/liquibase,Datical/liquibase,iherasymenko/liquibase,NSIT/liquibase,Willem1987/liquibase,klopfdreh/liquibase,lazaronixon/liquibase,evigeant/liquibase,pellcorp/liquibase,NSIT/liquibase,dyk/liquibase,vast-engineering/liquibase,cbotiza/liquibase,Datical/liquibase,fbiville/liquibase,cbotiza/liquibase,cleiter/liquibase,jimmycd/liquibase,klopfdreh/liquibase,OpenCST/liquibase,tjardo83/liquibase,lazaronixon/liquibase,talklittle/liquibase,dbmanul/dbmanul,liquibase/liquibase,foxel/liquibase,mortegac/liquibase,balazs-zsoldos/liquibase,dyk/liquibase,ArloL/liquibase,FreshGrade/liquibase,mortegac/liquibase,ivaylo5ev/liquibase,balazs-zsoldos/liquibase,mbreslow/liquibase,fossamagna/liquibase,mortegac/liquibase,Datical/liquibase,ivaylo5ev/liquibase,lazaronixon/liquibase,adriens/liquibase,pellcorp/liquibase,iherasymenko/liquibase,mattbertolini/liquibase,mwaylabs/liquibase,evigeant/liquibase,gquintana/liquibase,C0mmi3/liquibase,jimmycd/liquibase,vbekiaris/liquibase,OpenCST/liquibase,ArloL/liquibase,russ-p/liquibase,vfpfafrf/liquibase,tjardo83/liquibase,klopfdreh/liquibase,C0mmi3/liquibase,OpenCST/liquibase,Willem1987/liquibase
|
java
|
## Code Before:
package liquibase.datatype.core;
import liquibase.database.Database;
import liquibase.database.core.DerbyDatabase;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.database.core.PostgresDatabase;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
@DataTypeInfo(name="tinyint", aliases = "java.sql.Types.TINYINT", minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class TinyIntType extends LiquibaseDataType {
private boolean autoIncrement;
public boolean isAutoIncrement() {
return autoIncrement;
}
public void setAutoIncrement(boolean autoIncrement) {
this.autoIncrement = autoIncrement;
}
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
if (database instanceof DerbyDatabase || database instanceof PostgresDatabase) {
return new DatabaseDataType("SMALLINT");
}
if (database instanceof MSSQLDatabase) {
return new DatabaseDataType("TINYINT");
}
if (database instanceof OracleDatabase) {
return new DatabaseDataType("NUMBER",3);
}
return super.toDatabaseDataType(database);
}
}
## Instruction:
Use smallint for Firebird if tinyint is requested
## Code After:
package liquibase.datatype.core;
import liquibase.database.Database;
import liquibase.database.core.DerbyDatabase;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.database.core.PostgresDatabase;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
@DataTypeInfo(name="tinyint", aliases = "java.sql.Types.TINYINT", minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class TinyIntType extends LiquibaseDataType {
private boolean autoIncrement;
public boolean isAutoIncrement() {
return autoIncrement;
}
public void setAutoIncrement(boolean autoIncrement) {
this.autoIncrement = autoIncrement;
}
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
if (database instanceof DerbyDatabase || database instanceof PostgresDatabase || database instanceof FirebirdDatabase) {
return new DatabaseDataType("SMALLINT");
}
if (database instanceof MSSQLDatabase) {
return new DatabaseDataType("TINYINT");
}
if (database instanceof OracleDatabase) {
return new DatabaseDataType("NUMBER",3);
}
return super.toDatabaseDataType(database);
}
}
|
...
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
if (database instanceof DerbyDatabase || database instanceof PostgresDatabase || database instanceof FirebirdDatabase) {
return new DatabaseDataType("SMALLINT");
}
if (database instanceof MSSQLDatabase) {
...
|
3d1c4c3bd3dd6ae48e75772a2f2706d6104d189c
|
googkit.py
|
googkit.py
|
import os
import sys
from commands.apply_config import ApplyConfigCommand
from commands.compile import CompileCommand
from commands.init import InitCommand
from commands.setup import SetupCommand
from commands.update_deps import UpdateDepsCommand
from lib.config import Config
from lib.error import GoogkitError
CONFIG = 'googkit.cfg'
COMMANDS_DICT = {
'apply-config': [ApplyConfigCommand, UpdateDepsCommand],
'compile': [CompileCommand],
'init': [InitCommand],
'setup': [SetupCommand, UpdateDepsCommand],
'update-deps': [UpdateDepsCommand]}
def print_help():
print('Usage: googkit command')
print('')
print('Available subcommands:')
for name in sorted(COMMANDS_DICT.keys()):
print(' ' + name)
if __name__ == '__main__':
if len(sys.argv) != 2:
print_help()
sys.exit()
subcommand_classes = COMMANDS_DICT.get(sys.argv[1])
if subcommand_classes is None:
print_help()
sys.exit()
config = Config()
try:
config.load(CONFIG)
except IOError:
config = None
try:
for klass in subcommand_classes:
subcommand = klass(config)
subcommand.run()
except GoogkitError, e:
sys.exit('[ERROR] ' + str(e))
|
import os
import sys
from commands.apply_config import ApplyConfigCommand
from commands.compile import CompileCommand
from commands.init import InitCommand
from commands.setup import SetupCommand
from commands.update_deps import UpdateDepsCommand
from lib.config import Config
from lib.error import GoogkitError
CONFIG = 'googkit.cfg'
COMMANDS_DICT = {
'apply-config': [ApplyConfigCommand, UpdateDepsCommand],
'compile': [CompileCommand],
'init': [InitCommand],
'setup': [SetupCommand, UpdateDepsCommand],
'update-deps': [UpdateDepsCommand]}
def print_help():
print('Usage: googkit command')
print('')
print('Available subcommands:')
for name in sorted(COMMANDS_DICT.keys()):
print(' ' + name)
if __name__ == '__main__':
if len(sys.argv) != 2:
print_help()
sys.exit()
subcommand_classes = COMMANDS_DICT.get(sys.argv[1])
if subcommand_classes is None:
print_help()
sys.exit()
config = Config()
try:
while os.path.exists(os.path.relpath(CONFIG)):
before = os.getcwd()
os.chdir('..')
# Break if current dir is root.
if before == os.getcwd():
break
config.load(CONFIG)
except IOError:
config = None
try:
for klass in subcommand_classes:
subcommand = klass(config)
subcommand.run()
except GoogkitError, e:
sys.exit('[ERROR] ' + str(e))
|
Support making available to exec cmd on sub dir
|
Support making available to exec cmd on sub dir
|
Python
|
mit
|
googkit/googkit,googkit/googkit,googkit/googkit
|
python
|
## Code Before:
import os
import sys
from commands.apply_config import ApplyConfigCommand
from commands.compile import CompileCommand
from commands.init import InitCommand
from commands.setup import SetupCommand
from commands.update_deps import UpdateDepsCommand
from lib.config import Config
from lib.error import GoogkitError
CONFIG = 'googkit.cfg'
COMMANDS_DICT = {
'apply-config': [ApplyConfigCommand, UpdateDepsCommand],
'compile': [CompileCommand],
'init': [InitCommand],
'setup': [SetupCommand, UpdateDepsCommand],
'update-deps': [UpdateDepsCommand]}
def print_help():
print('Usage: googkit command')
print('')
print('Available subcommands:')
for name in sorted(COMMANDS_DICT.keys()):
print(' ' + name)
if __name__ == '__main__':
if len(sys.argv) != 2:
print_help()
sys.exit()
subcommand_classes = COMMANDS_DICT.get(sys.argv[1])
if subcommand_classes is None:
print_help()
sys.exit()
config = Config()
try:
config.load(CONFIG)
except IOError:
config = None
try:
for klass in subcommand_classes:
subcommand = klass(config)
subcommand.run()
except GoogkitError, e:
sys.exit('[ERROR] ' + str(e))
## Instruction:
Support making available to exec cmd on sub dir
## Code After:
import os
import sys
from commands.apply_config import ApplyConfigCommand
from commands.compile import CompileCommand
from commands.init import InitCommand
from commands.setup import SetupCommand
from commands.update_deps import UpdateDepsCommand
from lib.config import Config
from lib.error import GoogkitError
CONFIG = 'googkit.cfg'
COMMANDS_DICT = {
'apply-config': [ApplyConfigCommand, UpdateDepsCommand],
'compile': [CompileCommand],
'init': [InitCommand],
'setup': [SetupCommand, UpdateDepsCommand],
'update-deps': [UpdateDepsCommand]}
def print_help():
print('Usage: googkit command')
print('')
print('Available subcommands:')
for name in sorted(COMMANDS_DICT.keys()):
print(' ' + name)
if __name__ == '__main__':
if len(sys.argv) != 2:
print_help()
sys.exit()
subcommand_classes = COMMANDS_DICT.get(sys.argv[1])
if subcommand_classes is None:
print_help()
sys.exit()
config = Config()
try:
while os.path.exists(os.path.relpath(CONFIG)):
before = os.getcwd()
os.chdir('..')
# Break if current dir is root.
if before == os.getcwd():
break
config.load(CONFIG)
except IOError:
config = None
try:
for klass in subcommand_classes:
subcommand = klass(config)
subcommand.run()
except GoogkitError, e:
sys.exit('[ERROR] ' + str(e))
|
...
config = Config()
try:
while os.path.exists(os.path.relpath(CONFIG)):
before = os.getcwd()
os.chdir('..')
# Break if current dir is root.
if before == os.getcwd():
break
config.load(CONFIG)
except IOError:
config = None
...
|
27ef086cdd1037b2deaf58feeced028b2b8df154
|
app/tx/main.c
|
app/tx/main.c
|
void radio_event_handler(radio_evt_t * evt)
{
}
int main(void)
{
uint8_t i = 0;
radio_packet_t packet;
packet.len = 4;
radio_init(radio_event_handler);
NRF_GPIO->DIR = 1 << 18;
while (1)
{
packet.data[0] = i++;
packet.data[1] = 0x12;
radio_send(&packet);
NRF_GPIO->OUTSET = 1 << 18;
nrf_delay_us(100000);
NRF_GPIO->OUTCLR = 1 << 18;
nrf_delay_us(100000);
}
}
|
void radio_evt_handler(radio_evt_t * evt)
{
}
int main(void)
{
uint8_t i = 0;
radio_packet_t packet;
packet.len = 4;
packet.flags.ack = 0;
radio_init(radio_evt_handler);
NRF_GPIO->DIR = 1 << 18;
while (1)
{
packet.data[0] = i++;
packet.data[1] = 0x12;
radio_send(&packet);
NRF_GPIO->OUTSET = 1 << 18;
nrf_delay_us(100000);
NRF_GPIO->OUTCLR = 1 << 18;
nrf_delay_us(100000);
}
}
|
Rename the radio event handler.
|
Rename the radio event handler.
|
C
|
bsd-3-clause
|
hlnd/nrf51-simple-radio,hlnd/nrf51-simple-radio
|
c
|
## Code Before:
void radio_event_handler(radio_evt_t * evt)
{
}
int main(void)
{
uint8_t i = 0;
radio_packet_t packet;
packet.len = 4;
radio_init(radio_event_handler);
NRF_GPIO->DIR = 1 << 18;
while (1)
{
packet.data[0] = i++;
packet.data[1] = 0x12;
radio_send(&packet);
NRF_GPIO->OUTSET = 1 << 18;
nrf_delay_us(100000);
NRF_GPIO->OUTCLR = 1 << 18;
nrf_delay_us(100000);
}
}
## Instruction:
Rename the radio event handler.
## Code After:
void radio_evt_handler(radio_evt_t * evt)
{
}
int main(void)
{
uint8_t i = 0;
radio_packet_t packet;
packet.len = 4;
packet.flags.ack = 0;
radio_init(radio_evt_handler);
NRF_GPIO->DIR = 1 << 18;
while (1)
{
packet.data[0] = i++;
packet.data[1] = 0x12;
radio_send(&packet);
NRF_GPIO->OUTSET = 1 << 18;
nrf_delay_us(100000);
NRF_GPIO->OUTCLR = 1 << 18;
nrf_delay_us(100000);
}
}
|
...
void radio_evt_handler(radio_evt_t * evt)
{
}
...
radio_packet_t packet;
packet.len = 4;
packet.flags.ack = 0;
radio_init(radio_evt_handler);
NRF_GPIO->DIR = 1 << 18;
...
|
1179163881fe1dedab81a02a940c711479a334ab
|
Instanssi/admin_auth/forms.py
|
Instanssi/admin_auth/forms.py
|
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
Use passwordinput in password field.
|
admin_auth: Use passwordinput in password field.
|
Python
|
mit
|
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
|
python
|
## Code Before:
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana")
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
## Instruction:
admin_auth: Use passwordinput in password field.
## Code After:
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'',
'username',
'password',
ButtonHolder (
Submit('submit', 'Kirjaudu sisään')
)
)
)
|
// ... existing code ...
class LoginForm(forms.Form):
username = forms.CharField(label=u"Käyttäjätunnus", help_text=u"Admin-paneelin käyttäjätunnuksesi. Huom! OpenID-tunnukset eivät kelpaa!")
password = forms.CharField(label=u"Salasana", widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
// ... rest of the code ...
|
aaaab0d93723e880119afb52840718634b184054
|
falcom/logtree.py
|
falcom/logtree.py
|
class MutableTree:
value = None
def full_length (self):
return 0
def walk (self):
return iter(())
def __len__ (self):
return 0
def __iter__ (self):
return iter(())
def __getitem__ (self, index):
raise IndexError("tree index out of range")
def __repr__ (self):
return "<{}>".format(self.__class__.__name__)
|
class MutableTree:
def __init__ (self):
self.value = None
def full_length (self):
return 0
def walk (self):
return iter(())
def __len__ (self):
return 0
def __iter__ (self):
return iter(())
def __getitem__ (self, index):
raise IndexError("tree index out of range")
def __repr__ (self):
return "<{}>".format(self.__class__.__name__)
|
Set MutableTree.value on the object only
|
Set MutableTree.value on the object only
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
python
|
## Code Before:
class MutableTree:
value = None
def full_length (self):
return 0
def walk (self):
return iter(())
def __len__ (self):
return 0
def __iter__ (self):
return iter(())
def __getitem__ (self, index):
raise IndexError("tree index out of range")
def __repr__ (self):
return "<{}>".format(self.__class__.__name__)
## Instruction:
Set MutableTree.value on the object only
## Code After:
class MutableTree:
def __init__ (self):
self.value = None
def full_length (self):
return 0
def walk (self):
return iter(())
def __len__ (self):
return 0
def __iter__ (self):
return iter(())
def __getitem__ (self, index):
raise IndexError("tree index out of range")
def __repr__ (self):
return "<{}>".format(self.__class__.__name__)
|
...
class MutableTree:
def __init__ (self):
self.value = None
def full_length (self):
return 0
...
|
6a59e4ad609ecc84d1e7efcb6240175f981d6a66
|
org.jboss.reddeer.jface/src/org/jboss/reddeer/eclipse/jface/wizard/NewWizardDialog.java
|
org.jboss.reddeer.jface/src/org/jboss/reddeer/eclipse/jface/wizard/NewWizardDialog.java
|
package org.jboss.reddeer.eclipse.jface.wizard;
import org.apache.log4j.Logger;
import org.jboss.reddeer.swt.api.Menu;
import org.jboss.reddeer.swt.impl.menu.DefaultMenu;
import org.jboss.reddeer.swt.impl.shell.ActiveShell;
import org.jboss.reddeer.swt.impl.tree.DefaultTreeItem;
/**
* Abstract class to manage new wizard dialog
* @author vpakan
*
*/
public abstract class NewWizardDialog extends WizardDialog{
public static final String DIALOG_TITLE = "New";
private String[] path;
protected final Logger log = Logger.getLogger(this.getClass());
/**
* @param path - path to new object to be created within tree widget
* inside New wizard dialog
*/
public NewWizardDialog(String... path) {
this.path = path;
}
/**
* Opens wizard for new object to be created
*/
public void open(){
log.info("Open New Wizard");
super.open();
Menu menu = new DefaultMenu("File","New","Other...");
menu.select();
new ActiveShell(DIALOG_TITLE);
if (path.length > 0){
new DefaultTreeItem(path).select();
next();
}
}
}
|
package org.jboss.reddeer.eclipse.jface.wizard;
import org.apache.log4j.Logger;
import org.jboss.reddeer.swt.api.Menu;
import org.jboss.reddeer.swt.impl.menu.ShellMenu;
import org.jboss.reddeer.swt.impl.shell.ActiveShell;
import org.jboss.reddeer.swt.impl.tree.DefaultTreeItem;
/**
* Abstract class to manage new wizard dialog
* @author vpakan
*
*/
public abstract class NewWizardDialog extends WizardDialog{
public static final String DIALOG_TITLE = "New";
private String[] path;
protected final Logger log = Logger.getLogger(this.getClass());
/**
* @param path - path to new object to be created within tree widget
* inside New wizard dialog
*/
public NewWizardDialog(String... path) {
this.path = path;
}
/**
* Opens wizard for new object to be created
*/
public void open(){
log.info("Open New Wizard");
super.open();
Menu menu = new ShellMenu("File","New","Other...");
menu.select();
new ActiveShell(DIALOG_TITLE);
if (path.length > 0){
new DefaultTreeItem(path).select();
next();
}
}
}
|
Use ShellMenu instead of DefaultMenu
|
Use ShellMenu instead of DefaultMenu
|
Java
|
epl-1.0
|
jboss-reddeer/reddeer,jboss-reddeer/reddeer,djelinek/reddeer,djelinek/reddeer
|
java
|
## Code Before:
package org.jboss.reddeer.eclipse.jface.wizard;
import org.apache.log4j.Logger;
import org.jboss.reddeer.swt.api.Menu;
import org.jboss.reddeer.swt.impl.menu.DefaultMenu;
import org.jboss.reddeer.swt.impl.shell.ActiveShell;
import org.jboss.reddeer.swt.impl.tree.DefaultTreeItem;
/**
* Abstract class to manage new wizard dialog
* @author vpakan
*
*/
public abstract class NewWizardDialog extends WizardDialog{
public static final String DIALOG_TITLE = "New";
private String[] path;
protected final Logger log = Logger.getLogger(this.getClass());
/**
* @param path - path to new object to be created within tree widget
* inside New wizard dialog
*/
public NewWizardDialog(String... path) {
this.path = path;
}
/**
* Opens wizard for new object to be created
*/
public void open(){
log.info("Open New Wizard");
super.open();
Menu menu = new DefaultMenu("File","New","Other...");
menu.select();
new ActiveShell(DIALOG_TITLE);
if (path.length > 0){
new DefaultTreeItem(path).select();
next();
}
}
}
## Instruction:
Use ShellMenu instead of DefaultMenu
## Code After:
package org.jboss.reddeer.eclipse.jface.wizard;
import org.apache.log4j.Logger;
import org.jboss.reddeer.swt.api.Menu;
import org.jboss.reddeer.swt.impl.menu.ShellMenu;
import org.jboss.reddeer.swt.impl.shell.ActiveShell;
import org.jboss.reddeer.swt.impl.tree.DefaultTreeItem;
/**
* Abstract class to manage new wizard dialog
* @author vpakan
*
*/
public abstract class NewWizardDialog extends WizardDialog{
public static final String DIALOG_TITLE = "New";
private String[] path;
protected final Logger log = Logger.getLogger(this.getClass());
/**
* @param path - path to new object to be created within tree widget
* inside New wizard dialog
*/
public NewWizardDialog(String... path) {
this.path = path;
}
/**
* Opens wizard for new object to be created
*/
public void open(){
log.info("Open New Wizard");
super.open();
Menu menu = new ShellMenu("File","New","Other...");
menu.select();
new ActiveShell(DIALOG_TITLE);
if (path.length > 0){
new DefaultTreeItem(path).select();
next();
}
}
}
|
...
import org.apache.log4j.Logger;
import org.jboss.reddeer.swt.api.Menu;
import org.jboss.reddeer.swt.impl.menu.ShellMenu;
import org.jboss.reddeer.swt.impl.shell.ActiveShell;
import org.jboss.reddeer.swt.impl.tree.DefaultTreeItem;
...
public void open(){
log.info("Open New Wizard");
super.open();
Menu menu = new ShellMenu("File","New","Other...");
menu.select();
new ActiveShell(DIALOG_TITLE);
if (path.length > 0){
...
|
a412166af39edd7a78a1127dba2ecb5c65986049
|
feder/cases/factories.py
|
feder/cases/factories.py
|
from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
|
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
|
Clean up import in CaseFactory
|
Clean up import in CaseFactory
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
python
|
## Code Before:
from feder.cases import models
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = models.Case
## Instruction:
Clean up import in CaseFactory
## Code After:
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
name = factory.Sequence('case-{0}'.format)
user = factory.SubFactory(UserFactory)
institution = factory.SubFactory(InstitutionFactory)
@factory.lazy_attribute
def monitoring(self):
return MonitoringFactory(user=self.user)
class Meta:
model = Case
|
...
from feder.institutions.factories import InstitutionFactory
from feder.monitorings.factories import MonitoringFactory
import factory
from feder.users.factories import UserFactory
from .models import Case
class CaseFactory(factory.django.DjangoModelFactory):
...
return MonitoringFactory(user=self.user)
class Meta:
model = Case
...
|
7ebadc3a1befa265dfc65e78dfbe98041b96d076
|
serial_com_test/raspberry_pi/test.py
|
serial_com_test/raspberry_pi/test.py
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
Update SERIAL_DEVICE to match the Raspberry Pi
|
Update SERIAL_DEVICE to match the Raspberry Pi
|
Python
|
mit
|
zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9,zacharylawrence/ENEE408I-Team-9
|
python
|
## Code Before:
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/tty.usbmodem1421"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
## Instruction:
Update SERIAL_DEVICE to match the Raspberry Pi
## Code After:
import serial
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
time.sleep(2)
print("Connection Established");
# Send Data to Pi
ser.write('h')
time.sleep(5);
ser.write('l')
|
...
import time
# Define Constants
SERIAL_DEVICE = "/dev/ttyACM0"
# Establish Connection
ser = serial.Serial(SERIAL_DEVICE, 9600)
...
|
acc060f6ee491b1eed93b45df2f1b37fb710aa7a
|
test/CodeGen/wchar-const.c
|
test/CodeGen/wchar-const.c
|
// RUN: %clang_cc1 -emit-llvm %s -o - | FileCheck %s
// This should pass for any endianness combination of host and target.
// This bit is taken from Sema/wchar.c so we can avoid the wchar.h include.
typedef __WCHAR_TYPE__ wchar_t;
#if defined(_WIN32) || defined(_M_IX86) || defined(__CYGWIN__) \
|| defined(_M_X64) || defined(SHORT_WCHAR)
#define WCHAR_T_TYPE unsigned short
#elif defined(__sun) || defined(__AuroraUX__)
#define WCHAR_T_TYPE long
#else /* Solaris or AuroraUX. */
#define WCHAR_T_TYPE int
#endif
// CHECK: @.str = private unnamed_addr constant [72 x i8] c"
extern void foo(const wchar_t* p);
int main (int argc, const char * argv[])
{
foo(L"This is some text");
return 0;
}
|
// RUN: %clang_cc1 -emit-llvm %s -o - -triple i386-pc-win32 | FileCheck %s --check-prefix=WIN
// RUN: %clang_cc1 -emit-llvm %s -o - -triple x86_64-apple-darwin | FileCheck %s --check-prefix=DAR
// This should pass for any endianness combination of host and target.
// This bit is taken from Sema/wchar.c so we can avoid the wchar.h include.
typedef __WCHAR_TYPE__ wchar_t;
#if defined(_WIN32) || defined(_M_IX86) || defined(__CYGWIN__) \
|| defined(_M_X64) || defined(SHORT_WCHAR)
#define WCHAR_T_TYPE unsigned short
#elif defined(__sun) || defined(__AuroraUX__)
#define WCHAR_T_TYPE long
#else /* Solaris or AuroraUX. */
#define WCHAR_T_TYPE int
#endif
// CHECK-DAR: private unnamed_addr constant [72 x i8] c"
// CHECK-WIN: private unnamed_addr constant [36 x i8] c"
extern void foo(const wchar_t* p);
int main (int argc, const char * argv[])
{
foo(L"This is some text");
return 0;
}
|
Handle different sized wchar_t for windows.
|
Handle different sized wchar_t for windows.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@136192 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang_cc1 -emit-llvm %s -o - | FileCheck %s
// This should pass for any endianness combination of host and target.
// This bit is taken from Sema/wchar.c so we can avoid the wchar.h include.
typedef __WCHAR_TYPE__ wchar_t;
#if defined(_WIN32) || defined(_M_IX86) || defined(__CYGWIN__) \
|| defined(_M_X64) || defined(SHORT_WCHAR)
#define WCHAR_T_TYPE unsigned short
#elif defined(__sun) || defined(__AuroraUX__)
#define WCHAR_T_TYPE long
#else /* Solaris or AuroraUX. */
#define WCHAR_T_TYPE int
#endif
// CHECK: @.str = private unnamed_addr constant [72 x i8] c"
extern void foo(const wchar_t* p);
int main (int argc, const char * argv[])
{
foo(L"This is some text");
return 0;
}
## Instruction:
Handle different sized wchar_t for windows.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@136192 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang_cc1 -emit-llvm %s -o - -triple i386-pc-win32 | FileCheck %s --check-prefix=WIN
// RUN: %clang_cc1 -emit-llvm %s -o - -triple x86_64-apple-darwin | FileCheck %s --check-prefix=DAR
// This should pass for any endianness combination of host and target.
// This bit is taken from Sema/wchar.c so we can avoid the wchar.h include.
typedef __WCHAR_TYPE__ wchar_t;
#if defined(_WIN32) || defined(_M_IX86) || defined(__CYGWIN__) \
|| defined(_M_X64) || defined(SHORT_WCHAR)
#define WCHAR_T_TYPE unsigned short
#elif defined(__sun) || defined(__AuroraUX__)
#define WCHAR_T_TYPE long
#else /* Solaris or AuroraUX. */
#define WCHAR_T_TYPE int
#endif
// CHECK-DAR: private unnamed_addr constant [72 x i8] c"
// CHECK-WIN: private unnamed_addr constant [36 x i8] c"
extern void foo(const wchar_t* p);
int main (int argc, const char * argv[])
{
foo(L"This is some text");
return 0;
}
|
// ... existing code ...
// RUN: %clang_cc1 -emit-llvm %s -o - -triple i386-pc-win32 | FileCheck %s --check-prefix=WIN
// RUN: %clang_cc1 -emit-llvm %s -o - -triple x86_64-apple-darwin | FileCheck %s --check-prefix=DAR
// This should pass for any endianness combination of host and target.
// This bit is taken from Sema/wchar.c so we can avoid the wchar.h include.
// ... modified code ...
#endif
// CHECK-DAR: private unnamed_addr constant [72 x i8] c"
// CHECK-WIN: private unnamed_addr constant [36 x i8] c"
extern void foo(const wchar_t* p);
int main (int argc, const char * argv[])
{
// ... rest of the code ...
|
df8d6af61cacabf64ec3e470f2bb4d0f985a86bb
|
src/utility/pimpl_impl.h
|
src/utility/pimpl_impl.h
|
template<typename T>
pimpl<T>::pimpl() : m{ new T{} } { }
template<typename T>
template<typename ...Args>
pimpl<T>::pimpl( Args&& ...args )
: m{ new T{ std::forward<Args>(args)... } } { }
template<typename T>
pimpl<T>::~pimpl() { }
template<typename T>
T* pimpl<T>::operator->() { return m.get(); }
template<typename T>
const T* pimpl<T>::operator->() const { return m.get(); }
template<typename T>
T& pimpl<T>::operator*() { return *m.get(); }
#endif
|
template<typename T>
pimpl<T>::pimpl() : m{ make_unique<T>() } { }
template<typename T>
template<typename ...Args>
pimpl<T>::pimpl( Args&& ...args )
: m{ make_unique<T>(std::forward<Args>(args)...) } { }
template<typename T>
pimpl<T>::~pimpl() { }
template<typename T>
T* pimpl<T>::operator->() { return m.get(); }
template<typename T>
const T* pimpl<T>::operator->() const { return m.get(); }
template<typename T>
T& pimpl<T>::operator*() { return *m.get(); }
#endif
|
Use make_unique in pimpl helper
|
Use make_unique in pimpl helper
|
C
|
mit
|
adolby/Kryvos,adolby/Kryvos,adolby/Kryvos
|
c
|
## Code Before:
template<typename T>
pimpl<T>::pimpl() : m{ new T{} } { }
template<typename T>
template<typename ...Args>
pimpl<T>::pimpl( Args&& ...args )
: m{ new T{ std::forward<Args>(args)... } } { }
template<typename T>
pimpl<T>::~pimpl() { }
template<typename T>
T* pimpl<T>::operator->() { return m.get(); }
template<typename T>
const T* pimpl<T>::operator->() const { return m.get(); }
template<typename T>
T& pimpl<T>::operator*() { return *m.get(); }
#endif
## Instruction:
Use make_unique in pimpl helper
## Code After:
template<typename T>
pimpl<T>::pimpl() : m{ make_unique<T>() } { }
template<typename T>
template<typename ...Args>
pimpl<T>::pimpl( Args&& ...args )
: m{ make_unique<T>(std::forward<Args>(args)...) } { }
template<typename T>
pimpl<T>::~pimpl() { }
template<typename T>
T* pimpl<T>::operator->() { return m.get(); }
template<typename T>
const T* pimpl<T>::operator->() const { return m.get(); }
template<typename T>
T& pimpl<T>::operator*() { return *m.get(); }
#endif
|
...
template<typename T>
pimpl<T>::pimpl() : m{ make_unique<T>() } { }
template<typename T>
template<typename ...Args>
pimpl<T>::pimpl( Args&& ...args )
: m{ make_unique<T>(std::forward<Args>(args)...) } { }
template<typename T>
pimpl<T>::~pimpl() { }
...
|
8582126efa9907b06e9f9b183a0919feba9fb6b0
|
indra/literature/dart_client.py
|
indra/literature/dart_client.py
|
import logging
import requests
from indra.config import CONFIG_DICT
logger = logging.getLogger(__name__)
dart_uname = CONFIG_DICT['DART_WM_USERNAME']
dart_pwd = CONFIG_DICT['DART_WM_PASSWORD']
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
'/dart/api/v1/readers/query'
def query_dart_notifications(readers=None, versions=None, document_ids=None,
timestamp=None):
"""
Parameters
----------
readers : list
versions : list
document_ids : list
timestamp : dict("on"|"before"|"after",str)
Returns
-------
dict
"""
if all(v is None for v in [readers, versions, document_ids, timestamp]):
return {}
pd = {}
if readers:
pd['readers'] = readers
if versions:
pd['versions'] = versions
if document_ids:
pd['document_ids'] = document_ids
if isinstance(timestamp, dict):
pass # Check
res = requests.post(
dart_url,
data={'metadata':
None
},
auth=(dart_uname, dart_pwd)
)
if res.status_code != 200:
logger.warning(f'Dart Notifications Endpoint returned with status'
f' {res.status_code}: {res.text}')
return {}
return res.json()
|
import logging
import requests
from indra.config import get_config
logger = logging.getLogger(__name__)
dart_uname = get_config('DART_WM_USERNAME')
dart_pwd = get_config('DART_WM_PASSWORD')
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
'/dart/api/v1/readers/query'
def query_dart_notifications(readers=None, versions=None, document_ids=None,
timestamp=None):
"""
Parameters
----------
readers : list
versions : list
document_ids : list
timestamp : dict("on"|"before"|"after",str)
Returns
-------
dict
"""
if all(v is None for v in [readers, versions, document_ids, timestamp]):
return {}
pd = {}
if readers:
pd['readers'] = readers
if versions:
pd['versions'] = versions
if document_ids:
pd['document_ids'] = document_ids
if isinstance(timestamp, dict):
pass # Check
res = requests.post(
dart_url,
data={'metadata':
None
},
auth=(dart_uname, dart_pwd)
)
if res.status_code != 200:
logger.warning(f'Dart Notifications Endpoint returned with status'
f' {res.status_code}: {res.text}')
return {}
return res.json()
|
Use get_config instead of CONFIG_DICT
|
Use get_config instead of CONFIG_DICT
|
Python
|
bsd-2-clause
|
johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,bgyori/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra
|
python
|
## Code Before:
import logging
import requests
from indra.config import CONFIG_DICT
logger = logging.getLogger(__name__)
dart_uname = CONFIG_DICT['DART_WM_USERNAME']
dart_pwd = CONFIG_DICT['DART_WM_PASSWORD']
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
'/dart/api/v1/readers/query'
def query_dart_notifications(readers=None, versions=None, document_ids=None,
timestamp=None):
"""
Parameters
----------
readers : list
versions : list
document_ids : list
timestamp : dict("on"|"before"|"after",str)
Returns
-------
dict
"""
if all(v is None for v in [readers, versions, document_ids, timestamp]):
return {}
pd = {}
if readers:
pd['readers'] = readers
if versions:
pd['versions'] = versions
if document_ids:
pd['document_ids'] = document_ids
if isinstance(timestamp, dict):
pass # Check
res = requests.post(
dart_url,
data={'metadata':
None
},
auth=(dart_uname, dart_pwd)
)
if res.status_code != 200:
logger.warning(f'Dart Notifications Endpoint returned with status'
f' {res.status_code}: {res.text}')
return {}
return res.json()
## Instruction:
Use get_config instead of CONFIG_DICT
## Code After:
import logging
import requests
from indra.config import get_config
logger = logging.getLogger(__name__)
dart_uname = get_config('DART_WM_USERNAME')
dart_pwd = get_config('DART_WM_PASSWORD')
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
'/dart/api/v1/readers/query'
def query_dart_notifications(readers=None, versions=None, document_ids=None,
timestamp=None):
"""
Parameters
----------
readers : list
versions : list
document_ids : list
timestamp : dict("on"|"before"|"after",str)
Returns
-------
dict
"""
if all(v is None for v in [readers, versions, document_ids, timestamp]):
return {}
pd = {}
if readers:
pd['readers'] = readers
if versions:
pd['versions'] = versions
if document_ids:
pd['document_ids'] = document_ids
if isinstance(timestamp, dict):
pass # Check
res = requests.post(
dart_url,
data={'metadata':
None
},
auth=(dart_uname, dart_pwd)
)
if res.status_code != 200:
logger.warning(f'Dart Notifications Endpoint returned with status'
f' {res.status_code}: {res.text}')
return {}
return res.json()
|
# ... existing code ...
import logging
import requests
from indra.config import get_config
logger = logging.getLogger(__name__)
dart_uname = get_config('DART_WM_USERNAME')
dart_pwd = get_config('DART_WM_PASSWORD')
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
# ... rest of the code ...
|
b9c652a57d48f7618de8008e80968daa1327985e
|
src/gwt/src/org/rstudio/studio/client/pdfviewer/ui/images/Resources.java
|
src/gwt/src/org/rstudio/studio/client/pdfviewer/ui/images/Resources.java
|
/*
* Resources.java
*
* Copyright (C) 2009-11 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.pdfviewer.ui.images;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.ImageResource;
public interface Resources extends ClientBundle
{
ImageResource fileOptionsIcon();
ImageResource nextPageIcon();
ImageResource previousPageIcon();
ImageResource sizeButton();
ImageResource sizeButtonPressed();
ImageResource zoomButtonLeft();
ImageResource zoomButtonLeftPressed();
ImageResource zoomButtonRight();
ImageResource zoomButtonRightPressed();
ImageResource zoomInIcon();
ImageResource zoomOutIcon();
ImageResource thumbnailsIcon();
}
|
/*
* Resources.java
*
* Copyright (C) 2009-11 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.pdfviewer.ui.images;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.ImageResource;
public interface Resources extends ClientBundle
{
@Source("FileOptionsIcon.png")
ImageResource fileOptionsIcon();
@Source("NextPageIcon.png")
ImageResource nextPageIcon();
@Source("PreviousPageIcon.png")
ImageResource previousPageIcon();
@Source("SizeButton.png")
ImageResource sizeButton();
@Source("SizeButtonPressed.png")
ImageResource sizeButtonPressed();
@Source("ZoomButtonLeft.png")
ImageResource zoomButtonLeft();
@Source("ZoomButtonLeftPressed.png")
ImageResource zoomButtonLeftPressed();
@Source("ZoomButtonRight.png")
ImageResource zoomButtonRight();
@Source("ZoomButtonRightPressed.png")
ImageResource zoomButtonRightPressed();
@Source("ZoomInIcon.png")
ImageResource zoomInIcon();
@Source("ZoomOutIcon.png")
ImageResource zoomOutIcon();
@Source("ThumbnailsIcon.png")
ImageResource thumbnailsIcon();
}
|
Fix compile error on Linux
|
Fix compile error on Linux
|
Java
|
agpl-3.0
|
maligulzar/Rstudio-instrumented,JanMarvin/rstudio,edrogers/rstudio,jrnold/rstudio,edrogers/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,jzhu8803/rstudio,sfloresm/rstudio,jar1karp/rstudio,thklaus/rstudio,brsimioni/rstudio,jrnold/rstudio,jzhu8803/rstudio,pssguy/rstudio,maligulzar/Rstudio-instrumented,thklaus/rstudio,thklaus/rstudio,john-r-mcpherson/rstudio,john-r-mcpherson/rstudio,vbelakov/rstudio,githubfun/rstudio,tbarrongh/rstudio,john-r-mcpherson/rstudio,jzhu8803/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,pssguy/rstudio,john-r-mcpherson/rstudio,john-r-mcpherson/rstudio,vbelakov/rstudio,githubfun/rstudio,nvoron23/rstudio,sfloresm/rstudio,jrnold/rstudio,thklaus/rstudio,jzhu8803/rstudio,vbelakov/rstudio,john-r-mcpherson/rstudio,tbarrongh/rstudio,sfloresm/rstudio,githubfun/rstudio,jzhu8803/rstudio,edrogers/rstudio,JanMarvin/rstudio,edrogers/rstudio,brsimioni/rstudio,maligulzar/Rstudio-instrumented,nvoron23/rstudio,jrnold/rstudio,githubfun/rstudio,tbarrongh/rstudio,suribes/rstudio,more1/rstudio,vbelakov/rstudio,piersharding/rstudio,tbarrongh/rstudio,pssguy/rstudio,piersharding/rstudio,sfloresm/rstudio,jzhu8803/rstudio,edrogers/rstudio,more1/rstudio,maligulzar/Rstudio-instrumented,JanMarvin/rstudio,JanMarvin/rstudio,sfloresm/rstudio,maligulzar/Rstudio-instrumented,suribes/rstudio,brsimioni/rstudio,brsimioni/rstudio,piersharding/rstudio,jar1karp/rstudio,nvoron23/rstudio,jrnold/rstudio,JanMarvin/rstudio,piersharding/rstudio,piersharding/rstudio,more1/rstudio,edrogers/rstudio,jar1karp/rstudio,nvoron23/rstudio,maligulzar/Rstudio-instrumented,sfloresm/rstudio,thklaus/rstudio,thklaus/rstudio,jrnold/rstudio,pssguy/rstudio,suribes/rstudio,suribes/rstudio,more1/rstudio,githubfun/rstudio,nvoron23/rstudio,sfloresm/rstudio,john-r-mcpherson/rstudio,suribes/rstudio,jrnold/rstudio,brsimioni/rstudio,jzhu8803/rstudio,pssguy/rstudio,vbelakov/rstudio,jzhu8803/rstudio,nvoron23/rstudio,suribes/rstudio,john-r-mcpherson/rstudio,thklaus/rstudio,JanMarvin/rstudio,pssguy/rstudio,pssguy/rstudio,pssguy/rstudio,piersharding/rstudio,brsimioni/rstudio,githubfun/rstudio,suribes/rstudio,jar1karp/rstudio,githubfun/rstudio,tbarrongh/rstudio,tbarrongh/rstudio,vbelakov/rstudio,vbelakov/rstudio,brsimioni/rstudio,tbarrongh/rstudio,piersharding/rstudio,tbarrongh/rstudio,brsimioni/rstudio,jar1karp/rstudio,jrnold/rstudio,sfloresm/rstudio,piersharding/rstudio,githubfun/rstudio,jar1karp/rstudio,edrogers/rstudio,more1/rstudio,suribes/rstudio,vbelakov/rstudio,JanMarvin/rstudio,more1/rstudio,nvoron23/rstudio,edrogers/rstudio,thklaus/rstudio,jrnold/rstudio,more1/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,piersharding/rstudio,maligulzar/Rstudio-instrumented,jar1karp/rstudio,more1/rstudio
|
java
|
## Code Before:
/*
* Resources.java
*
* Copyright (C) 2009-11 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.pdfviewer.ui.images;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.ImageResource;
public interface Resources extends ClientBundle
{
ImageResource fileOptionsIcon();
ImageResource nextPageIcon();
ImageResource previousPageIcon();
ImageResource sizeButton();
ImageResource sizeButtonPressed();
ImageResource zoomButtonLeft();
ImageResource zoomButtonLeftPressed();
ImageResource zoomButtonRight();
ImageResource zoomButtonRightPressed();
ImageResource zoomInIcon();
ImageResource zoomOutIcon();
ImageResource thumbnailsIcon();
}
## Instruction:
Fix compile error on Linux
## Code After:
/*
* Resources.java
*
* Copyright (C) 2009-11 by RStudio, Inc.
*
* This program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.pdfviewer.ui.images;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.ImageResource;
public interface Resources extends ClientBundle
{
@Source("FileOptionsIcon.png")
ImageResource fileOptionsIcon();
@Source("NextPageIcon.png")
ImageResource nextPageIcon();
@Source("PreviousPageIcon.png")
ImageResource previousPageIcon();
@Source("SizeButton.png")
ImageResource sizeButton();
@Source("SizeButtonPressed.png")
ImageResource sizeButtonPressed();
@Source("ZoomButtonLeft.png")
ImageResource zoomButtonLeft();
@Source("ZoomButtonLeftPressed.png")
ImageResource zoomButtonLeftPressed();
@Source("ZoomButtonRight.png")
ImageResource zoomButtonRight();
@Source("ZoomButtonRightPressed.png")
ImageResource zoomButtonRightPressed();
@Source("ZoomInIcon.png")
ImageResource zoomInIcon();
@Source("ZoomOutIcon.png")
ImageResource zoomOutIcon();
@Source("ThumbnailsIcon.png")
ImageResource thumbnailsIcon();
}
|
# ... existing code ...
public interface Resources extends ClientBundle
{
@Source("FileOptionsIcon.png")
ImageResource fileOptionsIcon();
@Source("NextPageIcon.png")
ImageResource nextPageIcon();
@Source("PreviousPageIcon.png")
ImageResource previousPageIcon();
@Source("SizeButton.png")
ImageResource sizeButton();
@Source("SizeButtonPressed.png")
ImageResource sizeButtonPressed();
@Source("ZoomButtonLeft.png")
ImageResource zoomButtonLeft();
@Source("ZoomButtonLeftPressed.png")
ImageResource zoomButtonLeftPressed();
@Source("ZoomButtonRight.png")
ImageResource zoomButtonRight();
@Source("ZoomButtonRightPressed.png")
ImageResource zoomButtonRightPressed();
@Source("ZoomInIcon.png")
ImageResource zoomInIcon();
@Source("ZoomOutIcon.png")
ImageResource zoomOutIcon();
@Source("ThumbnailsIcon.png")
ImageResource thumbnailsIcon();
}
# ... rest of the code ...
|
e5acbfc176de3b531528c8b15f57e5d3feab3ad1
|
melody/constraints/abstract_constraint.py
|
melody/constraints/abstract_constraint.py
|
from abc import ABCMeta, abstractmethod
class AbstractConstraint(object):
"""
Class that represents a constraint, a set of actors that define a constraint amongst themselves.
ParameterMap: A map from template note to contextual note..
"""
__metaclass__ = ABCMeta
def __init__(self, actors):
self.__actors = list(actors)
@property
def actors(self):
return list(self.__actors)
@abstractmethod
def clone(self, new_actors=None):
"""
Clone the constraint.
:return:
"""
@abstractmethod
def verify(self, solution_context):
"""
Verify that the actor map parameters are consistent with constraint.
:params solution_context: aka pmap, map of actors to ContextualNotes.
:return: Boolean if verification holds.
May throw Exception dependent on implementation.
"""
@abstractmethod
def values(self, solution_context, v_note):
"""
Method to generate all possible note values for actor v_note's target.
The method returns a set of values for v_note.
:param solution_context: includes parameter map.
:param v_note: source actor, whose target values we are computing.
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
|
from abc import ABCMeta, abstractmethod
class AbstractConstraint(object):
"""
Class that represents a constraint, a set of actors that define a constraint amongst themselves.
ParameterMap: A map from template note to contextual note..
"""
__metaclass__ = ABCMeta
def __init__(self, actors):
self.__actors = list(actors)
@property
def actors(self):
return list(self.__actors)
@abstractmethod
def clone(self, new_actors=None):
"""
Clone the constraint.
:return:
"""
@abstractmethod
def verify(self, solution_context):
"""
Verify that the actor map parameters are consistent with constraint.
:params solution_context: aka pmap, map of actors to ContextualNotes.
:return: Boolean if verification holds.
May throw Exception dependent on implementation.
"""
@abstractmethod
def values(self, solution_context, v_note):
"""
Method to generate all possible note values for actor v_note's target.
The method returns a set of values for v_note.
:param solution_context: includes parameter map.
:param v_note: source actor, whose target values we are computing.
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
def __hash__(self):
return hash(len(self.actors))
def __eq__(self, other):
if not isinstance(other, AbstractConstraint):
return NotImplemented
return self is other
|
Add hash and eq methods
|
Add hash and eq methods
|
Python
|
mit
|
dpazel/music_rep
|
python
|
## Code Before:
from abc import ABCMeta, abstractmethod
class AbstractConstraint(object):
"""
Class that represents a constraint, a set of actors that define a constraint amongst themselves.
ParameterMap: A map from template note to contextual note..
"""
__metaclass__ = ABCMeta
def __init__(self, actors):
self.__actors = list(actors)
@property
def actors(self):
return list(self.__actors)
@abstractmethod
def clone(self, new_actors=None):
"""
Clone the constraint.
:return:
"""
@abstractmethod
def verify(self, solution_context):
"""
Verify that the actor map parameters are consistent with constraint.
:params solution_context: aka pmap, map of actors to ContextualNotes.
:return: Boolean if verification holds.
May throw Exception dependent on implementation.
"""
@abstractmethod
def values(self, solution_context, v_note):
"""
Method to generate all possible note values for actor v_note's target.
The method returns a set of values for v_note.
:param solution_context: includes parameter map.
:param v_note: source actor, whose target values we are computing.
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
## Instruction:
Add hash and eq methods
## Code After:
from abc import ABCMeta, abstractmethod
class AbstractConstraint(object):
"""
Class that represents a constraint, a set of actors that define a constraint amongst themselves.
ParameterMap: A map from template note to contextual note..
"""
__metaclass__ = ABCMeta
def __init__(self, actors):
self.__actors = list(actors)
@property
def actors(self):
return list(self.__actors)
@abstractmethod
def clone(self, new_actors=None):
"""
Clone the constraint.
:return:
"""
@abstractmethod
def verify(self, solution_context):
"""
Verify that the actor map parameters are consistent with constraint.
:params solution_context: aka pmap, map of actors to ContextualNotes.
:return: Boolean if verification holds.
May throw Exception dependent on implementation.
"""
@abstractmethod
def values(self, solution_context, v_note):
"""
Method to generate all possible note values for actor v_note's target.
The method returns a set of values for v_note.
:param solution_context: includes parameter map.
:param v_note: source actor, whose target values we are computing.
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
def __hash__(self):
return hash(len(self.actors))
def __eq__(self, other):
if not isinstance(other, AbstractConstraint):
return NotImplemented
return self is other
|
...
:return: The set of all possible values for v_note's target.
Note: The return value is a set!
"""
def __hash__(self):
return hash(len(self.actors))
def __eq__(self, other):
if not isinstance(other, AbstractConstraint):
return NotImplemented
return self is other
...
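A toy subclass helps show how the new __hash__ and __eq__ behave together with the abstract interface above; the class, the actor names, and the plain-dict solution_context below are hypothetical stand-ins, not types from the music_rep codebase.
class FixedPitchConstraint(AbstractConstraint):
    # Hypothetical constraint: every actor's target must equal one fixed value.
    def __init__(self, actors, pitch):
        super(FixedPitchConstraint, self).__init__(actors)
        self.pitch = pitch
    def clone(self, new_actors=None):
        return FixedPitchConstraint(new_actors if new_actors is not None else self.actors, self.pitch)
    def verify(self, solution_context):
        return all(solution_context[a] == self.pitch for a in self.actors)
    def values(self, solution_context, v_note):
        return {self.pitch}  # always a set, per the docstring above
c1 = FixedPitchConstraint(['n1', 'n2'], 60)
c2 = FixedPitchConstraint(['n1', 'n2'], 60)
assert hash(c1) == hash(c2)   # both hash on len(actors) == 2
assert not (c1 == c2)         # __eq__ is identity-based, so distinct instances stay distinct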
|
1455da161123ea778d8e82c2f961fdcf85cd10aa
|
monitor-checker-http.py
|
monitor-checker-http.py
|
import pika
import json
import requests
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='http')
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
print resp
channel.queue_declare(queue='results')
channel.basic_publish(exchange='results',
routing_key='results',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
|
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
|
Add credentials + code clean up
|
Add credentials + code clean up
|
Python
|
mit
|
observer-hackaton/monitor-checker-http
|
python
|
## Code Before:
import pika
import json
import requests
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER))
channel = connection.channel()
channel.queue_declare(queue='http')
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
print resp
channel.queue_declare(queue='results')
channel.basic_publish(exchange='results',
routing_key='results',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
## Instruction:
Add credentials + code clean up
## Code After:
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
host = json.loads(req["monitor"]["check"]["arguments"])["host"]
r = requests.get(host)
req["monitor"]["result"]= {}
req["monitor"]["result"]["status"] = "ok" if r.status_code == 200 else "fail"
req["monitor"]["result"]["check"] = req["monitor"]["check"]
del req["monitor"]["check"]
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
queue='http',
no_ack=True)
channel.start_consuming()
|
// ... existing code ...
import pika
import json
import requests
import os
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()
def callback(ch, method, properties, body):
req = json.loads(body)
// ... modified code ...
print req
print r.status_code
resp = json.dumps(req)
channel.basic_publish(exchange='results',
routing_key='',
body=resp)
channel.basic_consume(callback,
// ... rest of the code ...
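For context, a producer sketch that would feed this consumer; the message shape is inferred from the callback above (monitor -> check -> arguments -> host JSON), and the host URL is a placeholder rather than anything from the original repository.
import json
import os
import pika
credentials = pika.PlainCredentials(os.environ["RABBIT_MQ_USER"], os.environ["RABBIT_MQ_PWD"])
connection = pika.BlockingConnection(pika.ConnectionParameters(os.environ["RABBIT_MQ_SERVER"], credentials=credentials))
channel = connection.channel()
# The consumer expects req["monitor"]["check"]["arguments"] to be a JSON string with a "host" key.
check = {"monitor": {"check": {"arguments": json.dumps({"host": "http://example.com"})}}}
# Publishing through the default exchange ('') routes to the queue named by the routing key.
channel.basic_publish(exchange='', routing_key='http', body=json.dumps(check))
connection.close()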
|
c891abe89cdcfe47950e6c9d169747b94e0c07f8
|
ollie/src/main/java/ollie/internal/ModelAdapter.java
|
ollie/src/main/java/ollie/internal/ModelAdapter.java
|
package ollie.internal;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.provider.BaseColumns;
import ollie.Model;
public abstract class ModelAdapter<T extends Model> {
public abstract Class<? extends Model> getModelType();
public abstract String getTableName();
public abstract String getSchema();
public abstract void load(T entity, Cursor cursor);
public abstract Long save(T entity, SQLiteDatabase db);
public abstract void delete(T entity, SQLiteDatabase db);
protected final Long insertOrUpdate(T entity, SQLiteDatabase db, ContentValues values) {
if (entity.id == null) {
entity.id = db.insert(getTableName(), null, values);
} else {
db.update(getTableName(), values, "WHERE " + BaseColumns._ID + "=?", new String[]{entity.id.toString()});
}
return entity.id;
}
}
|
package ollie.internal;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.provider.BaseColumns;
import ollie.Model;
public abstract class ModelAdapter<T extends Model> {
public abstract Class<? extends Model> getModelType();
public abstract String getTableName();
public abstract String getSchema();
public abstract void load(T entity, Cursor cursor);
public abstract Long save(T entity, SQLiteDatabase db);
public abstract void delete(T entity, SQLiteDatabase db);
protected final Long insertOrUpdate(T entity, SQLiteDatabase db, ContentValues values) {
if (entity.id == null) {
entity.id = db.insert(getTableName(), null, values);
} else {
db.update(getTableName(), values, BaseColumns._ID + "=?", new String[]{entity.id.toString()});
}
return entity.id;
}
}
|
Fix update "WHERE WHERE" bug.
|
Fix update "WHERE WHERE" bug.
|
Java
|
apache-2.0
|
pardom/Ollie,ArnaudPiroelle/Ollie,gotokatsuya/Ollie,r0adkll/Ollie
|
java
|
## Code Before:
package ollie.internal;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.provider.BaseColumns;
import ollie.Model;
public abstract class ModelAdapter<T extends Model> {
public abstract Class<? extends Model> getModelType();
public abstract String getTableName();
public abstract String getSchema();
public abstract void load(T entity, Cursor cursor);
public abstract Long save(T entity, SQLiteDatabase db);
public abstract void delete(T entity, SQLiteDatabase db);
protected final Long insertOrUpdate(T entity, SQLiteDatabase db, ContentValues values) {
if (entity.id == null) {
entity.id = db.insert(getTableName(), null, values);
} else {
db.update(getTableName(), values, "WHERE " + BaseColumns._ID + "=?", new String[]{entity.id.toString()});
}
return entity.id;
}
}
## Instruction:
Fix update "WHERE WHERE" bug.
## Code After:
package ollie.internal;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.provider.BaseColumns;
import ollie.Model;
public abstract class ModelAdapter<T extends Model> {
public abstract Class<? extends Model> getModelType();
public abstract String getTableName();
public abstract String getSchema();
public abstract void load(T entity, Cursor cursor);
public abstract Long save(T entity, SQLiteDatabase db);
public abstract void delete(T entity, SQLiteDatabase db);
protected final Long insertOrUpdate(T entity, SQLiteDatabase db, ContentValues values) {
if (entity.id == null) {
entity.id = db.insert(getTableName(), null, values);
} else {
db.update(getTableName(), values, BaseColumns._ID + "=?", new String[]{entity.id.toString()});
}
return entity.id;
}
}
|
// ... existing code ...
if (entity.id == null) {
entity.id = db.insert(getTableName(), null, values);
} else {
db.update(getTableName(), values, BaseColumns._ID + "=?", new String[]{entity.id.toString()});
}
return entity.id;
// ... rest of the code ...
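The doubled keyword came from the fact that Android's SQLiteDatabase.update() prepends WHERE to the supplied clause itself, so the caller should pass only the condition. A rough Python sqlite3 analogy (table and column names made up for illustration):
import sqlite3
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE items (_id INTEGER PRIMARY KEY, name TEXT)")
conn.execute("INSERT INTO items (name) VALUES ('old')")
# The WHERE keyword appears exactly once, in the statement template; the condition
# itself is just "_id = ?". A doubled "WHERE WHERE _id = ?" would be a syntax error,
# which is what the pre-fix Android code effectively generated.
conn.execute("UPDATE items SET name = ? WHERE _id = ?", ("new", 1))
print(conn.execute("SELECT name FROM items WHERE _id = 1").fetchone())  # ('new',)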
|
0ccf40ceff0ca0000641791dc9750fcd52932cd8
|
setup.py
|
setup.py
|
import io
import setuptools
with io.open('README.txt', encoding='utf-8') as readme:
long_description = readme.read()
with io.open('CHANGES.txt', encoding='utf-8') as changes:
long_description += '\n\n' + changes.read()
setup_params = dict(
name='tempora',
use_hg_version=True,
author="Jason R. Coombs",
author_email="[email protected]",
description="tempora",
long_description=long_description,
url="https://bitbucket.org/jaraco/tempora",
py_modules=['tempora'],
entry_points={
'console_scripts': [
'calc-prorate = tempora:calculate_prorated_values',
],
},
setup_requires=[
'hgtools',
'pytest-runner',
'sphinx',
],
tests_require=[
'pytest',
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
|
import io
import setuptools
with io.open('README.txt', encoding='utf-8') as readme:
long_description = readme.read()
with io.open('CHANGES.txt', encoding='utf-8') as changes:
long_description += '\n\n' + changes.read()
setup_params = dict(
name='tempora',
use_hg_version=True,
author="Jason R. Coombs",
author_email="[email protected]",
description="tempora",
long_description=long_description,
url="https://bitbucket.org/jaraco/tempora",
py_modules=['tempora'],
license='MIT',
entry_points={
'console_scripts': [
'calc-prorate = tempora:calculate_prorated_values',
],
},
setup_requires=[
'hgtools',
'pytest-runner',
'sphinx',
],
tests_require=[
'pytest',
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
|
Add license info (retroactively applied to all commits).
|
Add license info (retroactively applied to all commits).
|
Python
|
mit
|
jaraco/tempora
|
python
|
## Code Before:
import io
import setuptools
with io.open('README.txt', encoding='utf-8') as readme:
long_description = readme.read()
with io.open('CHANGES.txt', encoding='utf-8') as changes:
long_description += '\n\n' + changes.read()
setup_params = dict(
name='tempora',
use_hg_version=True,
author="Jason R. Coombs",
author_email="[email protected]",
description="tempora",
long_description=long_description,
url="https://bitbucket.org/jaraco/tempora",
py_modules=['tempora'],
entry_points={
'console_scripts': [
'calc-prorate = tempora:calculate_prorated_values',
],
},
setup_requires=[
'hgtools',
'pytest-runner',
'sphinx',
],
tests_require=[
'pytest',
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
## Instruction:
Add license info (retroactively applied to all commits).
## Code After:
import io
import setuptools
with io.open('README.txt', encoding='utf-8') as readme:
long_description = readme.read()
with io.open('CHANGES.txt', encoding='utf-8') as changes:
long_description += '\n\n' + changes.read()
setup_params = dict(
name='tempora',
use_hg_version=True,
author="Jason R. Coombs",
author_email="[email protected]",
description="tempora",
long_description=long_description,
url="https://bitbucket.org/jaraco/tempora",
py_modules=['tempora'],
license='MIT',
entry_points={
'console_scripts': [
'calc-prorate = tempora:calculate_prorated_values',
],
},
setup_requires=[
'hgtools',
'pytest-runner',
'sphinx',
],
tests_require=[
'pytest',
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
|
...
long_description=long_description,
url="https://bitbucket.org/jaraco/tempora",
py_modules=['tempora'],
license='MIT',
entry_points={
'console_scripts': [
'calc-prorate = tempora:calculate_prorated_values',
...
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
...
|
bdc0466c63347280fbd8bc8c30fb07f294200194
|
client/third_party/idna/__init__.py
|
client/third_party/idna/__init__.py
|
def encode(host, uts46):
return unicode(host)
|
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
Change idna stub to use python's default
|
[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
[email protected]
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <[email protected]>
Commit-Queue: Marc-Antoine Ruel <[email protected]>
|
Python
|
apache-2.0
|
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
|
python
|
## Code Before:
def encode(host, uts46):
return unicode(host)
## Instruction:
[client] Change idna stub to use python's default
Fix a regression from 690b8ae29be2ca3b4782fa6ad0e7f2454443c38d which broke
select bots running inside docker.
The new stub is still simpler than https://pypi.org/project/idna/ and lighter
weight but much better than ignoring the "xn-" encoding as this was done
previously. As per the project home page:
This acts as a suitable replacement for the “encodings.idna” module that comes
with the Python standard library, but only supports the old, deprecated IDNA
specification (RFC 3490).
In practice, we don't expect to use non-ASCII hostnames, so it's not a big deal
for us.
decode() is required by openssl/x509.py.
[email protected]
Bug: 916644
Change-Id: Ia999a56b981d943e2f3d942f83e40d40e1bb805b
Reviewed-on: https://chromium-review.googlesource.com/c/infra/luci/luci-py/+/1573244
Reviewed-by: Marc-Antoine Ruel <[email protected]>
Commit-Queue: Marc-Antoine Ruel <[email protected]>
## Code After:
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
|
...
from encodings import idna
def encode(host, uts46=False): # pylint: disable=unused-argument
# Used by urllib3
return idna.ToASCII(host)
def decode(host):
# Used by cryptography/hazmat/backends/openssl/x509.py
return idna.ToUnicode(host)
...
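A quick round-trip with the stdlib module the stub now delegates to, assuming the ASCII-only hostnames the commit message anticipates (outputs shown for the Python 2 runtime this client code targets):
from encodings import idna
print(idna.ToASCII('example.com'))    # 'example.com' -- ASCII hostnames pass through
print(idna.ToUnicode('example.com'))  # u'example.com'
# Non-ASCII labels would still be punycoded per RFC 3490 (a single label such as
# u'b\xfccher' becomes 'xn--bcher-kva'), but the commit message notes these are
# not expected in practice.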
|
4868c9a6e976612ceed26c166c3dea7fd58d0d35
|
setup.py
|
setup.py
|
import ast
import os
import re
import sys
from setuptools import find_packages, setup
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
# parse version from locust/__init__.py
_version_re = re.compile(r"__version__\s+=\s+(.*)")
_init_file = os.path.join(ROOT_PATH, "locust", "__init__.py")
with open(_init_file, "rb") as f:
version = str(ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1)))
setup(
name="locust",
version=version,
install_requires=[
"gevent>=20.9.0",
"flask>=2.0.0",
"Werkzeug>=2.0.0",
"requests>=2.9.1",
"msgpack>=0.6.2",
"pyzmq>=16.0.2",
"geventhttpclient>=1.4.4",
"ConfigArgParse>=1.0",
"psutil>=5.6.7",
"Flask-BasicAuth>=0.2.0",
"Flask-Cors>=3.0.10",
],
test_suite="locust.test",
tests_require=[
"cryptography",
"mock",
"pyquery",
],
extras_require={
":sys_platform == 'win32'": ["pywin32"],
},
)
|
import ast
import os
import re
import sys
from setuptools import find_packages, setup
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
# parse version from locust/__init__.py
_version_re = re.compile(r"__version__\s+=\s+(.*)")
_init_file = os.path.join(ROOT_PATH, "locust", "__init__.py")
with open(_init_file, "rb") as f:
version = str(ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1)))
setup(
name="locust",
version=version,
install_requires=[
"gevent>=20.9.0",
"flask>=2.0.0",
"Werkzeug>=2.0.0",
"requests>=2.9.1",
"msgpack>=0.6.2",
"pyzmq>=16.0.2",
"geventhttpclient>=1.4.4",
"ConfigArgParse>=1.0",
"psutil>=5.6.7",
"Flask-BasicAuth>=0.2.0",
"Flask-Cors>=3.0.10",
"roundrobin>=0.0.2",
],
test_suite="locust.test",
tests_require=[
"cryptography",
"mock",
"pyquery",
],
extras_require={
":sys_platform == 'win32'": ["pywin32"],
},
)
|
Include `roundrobin` in the dependencies
|
Include `roundrobin` in the dependencies
|
Python
|
mit
|
locustio/locust,locustio/locust,mbeacom/locust,mbeacom/locust,locustio/locust,mbeacom/locust,locustio/locust,mbeacom/locust
|
python
|
## Code Before:
import ast
import os
import re
import sys
from setuptools import find_packages, setup
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
# parse version from locust/__init__.py
_version_re = re.compile(r"__version__\s+=\s+(.*)")
_init_file = os.path.join(ROOT_PATH, "locust", "__init__.py")
with open(_init_file, "rb") as f:
version = str(ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1)))
setup(
name="locust",
version=version,
install_requires=[
"gevent>=20.9.0",
"flask>=2.0.0",
"Werkzeug>=2.0.0",
"requests>=2.9.1",
"msgpack>=0.6.2",
"pyzmq>=16.0.2",
"geventhttpclient>=1.4.4",
"ConfigArgParse>=1.0",
"psutil>=5.6.7",
"Flask-BasicAuth>=0.2.0",
"Flask-Cors>=3.0.10",
],
test_suite="locust.test",
tests_require=[
"cryptography",
"mock",
"pyquery",
],
extras_require={
":sys_platform == 'win32'": ["pywin32"],
},
)
## Instruction:
Include `roundrobin` in the dependencies
## Code After:
import ast
import os
import re
import sys
from setuptools import find_packages, setup
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
# parse version from locust/__init__.py
_version_re = re.compile(r"__version__\s+=\s+(.*)")
_init_file = os.path.join(ROOT_PATH, "locust", "__init__.py")
with open(_init_file, "rb") as f:
version = str(ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1)))
setup(
name="locust",
version=version,
install_requires=[
"gevent>=20.9.0",
"flask>=2.0.0",
"Werkzeug>=2.0.0",
"requests>=2.9.1",
"msgpack>=0.6.2",
"pyzmq>=16.0.2",
"geventhttpclient>=1.4.4",
"ConfigArgParse>=1.0",
"psutil>=5.6.7",
"Flask-BasicAuth>=0.2.0",
"Flask-Cors>=3.0.10",
"roundrobin>=0.0.2",
],
test_suite="locust.test",
tests_require=[
"cryptography",
"mock",
"pyquery",
],
extras_require={
":sys_platform == 'win32'": ["pywin32"],
},
)
|
// ... existing code ...
"psutil>=5.6.7",
"Flask-BasicAuth>=0.2.0",
"Flask-Cors>=3.0.10",
"roundrobin>=0.0.2",
],
test_suite="locust.test",
tests_require=[
// ... rest of the code ...
|
0da6fb4d36a340b36f6153c66bcd432982abf1dd
|
src/python/pylogger.h
|
src/python/pylogger.h
|
namespace Cantera
{
/// Logger for Python.
/// @ingroup textlogs
class Py_Logger : public Logger
{
public:
Py_Logger() {
PyRun_SimpleString("import sys");
}
virtual ~Py_Logger() {}
virtual void write(const std::string& s) {
std::string ss = "sys.stdout.write(\"\"\"";
ss += s;
ss += "\"\"\")";
PyRun_SimpleString(ss.c_str());
PyRun_SimpleString("sys.stdout.flush()");
}
virtual void error(const std::string& msg) {
std::string err = "raise \""+msg+"\"";
PyRun_SimpleString((char*)err.c_str());
}
};
}
#endif
|
namespace Cantera
{
/// Logger for Python.
/// @ingroup textlogs
class Py_Logger : public Logger
{
public:
Py_Logger() {
PyRun_SimpleString("import sys");
}
virtual ~Py_Logger() {}
virtual void write(const std::string& s) {
std::string ss = "sys.stdout.write(\"\"\"";
ss += s;
ss += "\"\"\")";
PyRun_SimpleString(ss.c_str());
PyRun_SimpleString("sys.stdout.flush()");
}
virtual void error(const std::string& msg) {
std::string err = "raise Exception(\"\"\""+msg+"\"\"\")";
PyRun_SimpleString(err.c_str());
}
};
}
#endif
|
Fix Py_Logger to raise instances of Exception instead of strings
|
Fix Py_Logger to raise instances of Exception instead of strings
Raising string exceptions was removed in Python 2.6
|
C
|
bsd-3-clause
|
imitrichev/cantera,Heathckliff/cantera,Heathckliff/cantera,imitrichev/cantera,imitrichev/cantera,imitrichev/cantera,imitrichev/cantera,Heathckliff/cantera,imitrichev/cantera,Heathckliff/cantera,Heathckliff/cantera,Heathckliff/cantera
|
c
|
## Code Before:
namespace Cantera
{
/// Logger for Python.
/// @ingroup textlogs
class Py_Logger : public Logger
{
public:
Py_Logger() {
PyRun_SimpleString("import sys");
}
virtual ~Py_Logger() {}
virtual void write(const std::string& s) {
std::string ss = "sys.stdout.write(\"\"\"";
ss += s;
ss += "\"\"\")";
PyRun_SimpleString(ss.c_str());
PyRun_SimpleString("sys.stdout.flush()");
}
virtual void error(const std::string& msg) {
std::string err = "raise \""+msg+"\"";
PyRun_SimpleString((char*)err.c_str());
}
};
}
#endif
## Instruction:
Fix Py_Logger to raise instances of Exception instead of strings
Raising string exceptions was removed in Python 2.6
## Code After:
namespace Cantera
{
/// Logger for Python.
/// @ingroup textlogs
class Py_Logger : public Logger
{
public:
Py_Logger() {
PyRun_SimpleString("import sys");
}
virtual ~Py_Logger() {}
virtual void write(const std::string& s) {
std::string ss = "sys.stdout.write(\"\"\"";
ss += s;
ss += "\"\"\")";
PyRun_SimpleString(ss.c_str());
PyRun_SimpleString("sys.stdout.flush()");
}
virtual void error(const std::string& msg) {
std::string err = "raise Exception(\"\"\""+msg+"\"\"\")";
PyRun_SimpleString(err.c_str());
}
};
}
#endif
|
# ... existing code ...
}
virtual void error(const std::string& msg) {
std::string err = "raise Exception(\"\"\""+msg+"\"\"\")";
PyRun_SimpleString(err.c_str());
}
};
}
# ... rest of the code ...
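A small, standalone illustration of why the generated statement had to change, independent of the Cantera embedding:
# Raising string exceptions was removed in Python 2.6, so the previously generated
#     raise "some error message"
# fails with TypeError ("exceptions must ... derive from BaseException").
try:
    raise Exception("""division failed: T < 0""")  # triple quotes tolerate embedded quotes/newlines
except Exception as exc:
    print(exc)  # division failed: T < 0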
|
7b5ffcef89fe12576885bf4d29651829a5ed6249
|
gala/__init__.py
|
gala/__init__.py
|
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.4dev'
|
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.4dev'
|
Remove no longer valid del sys statement
|
Remove no longer valid del sys statement
|
Python
|
bsd-3-clause
|
jni/gala,janelia-flyem/gala
|
python
|
## Code Before:
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
del sys, logging
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.4dev'
## Instruction:
Remove no longer valid del sys statement
## Code After:
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
__version__ = '0.4dev'
|
# ... existing code ...
__author__ = 'Juan Nunez-Iglesias <[email protected]>, '+\
'Ryan Kennedy <[email protected]>'
__all__ = ['agglo', 'morpho', 'evaluate', 'viz', 'imio', 'classify',
'stack_np', 'app_logger', 'option_manager', 'features', 'filter']
# ... rest of the code ...
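The deleted statement only made sense while sys and logging were imported at module scope; with those imports gone it would itself fail at import time, as a minimal reproduction shows:
# At module level, deleting a name that was never bound raises NameError.
try:
    del sys
except NameError as exc:
    print(exc)  # name 'sys' is not defined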
|
987c54559cb52370fc459a30cdbdfd0e38c5ef62
|
plata/context_processors.py
|
plata/context_processors.py
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
Add the variable `plata.price_includes_tax` to the template context
|
Add the variable `plata.price_includes_tax` to the template context
|
Python
|
bsd-3-clause
|
armicron/plata,armicron/plata,stefanklug/plata,armicron/plata
|
python
|
## Code Before:
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
## Instruction:
Add the variable `plata.price_includes_tax` to the template context
## Code After:
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
...
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
...
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
...
|
f8a34d07fc740faf9e5eb4e9bf60578af312d66f
|
app/src/main/java/org/apache/taverna/mobile/activities/RunResult.java
|
app/src/main/java/org/apache/taverna/mobile/activities/RunResult.java
|
package org.apache.taverna.mobile.activities;
import org.apache.taverna.mobile.R;
import org.apache.taverna.mobile.fragments.workflowdetails.RunFragment;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
public class RunResult extends ActionBarActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_run_result);
if (savedInstanceState == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.container, RunFragment.newInstance())
.commit();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
return false;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
this.finish();
return super.onOptionsItemSelected(item);
}
}
|
package org.apache.taverna.mobile.activities;
import org.apache.taverna.mobile.R;
import org.apache.taverna.mobile.fragments.workflowdetails.RunFragment;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
public class RunResult extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_run_result);
if (savedInstanceState == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.container, RunFragment.newInstance())
.commit();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
return false;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
this.finish();
return super.onOptionsItemSelected(item);
}
}
|
Use AppCompat instead of ActionBar
|
Use AppCompat instead of ActionBar
|
Java
|
apache-2.0
|
apache/incubator-taverna-mobile,apache/incubator-taverna-mobile,sagar15795/incubator-taverna-mobile,sagar15795/incubator-taverna-mobile,ianwdunlop/incubator-taverna-mobile
|
java
|
## Code Before:
package org.apache.taverna.mobile.activities;
import org.apache.taverna.mobile.R;
import org.apache.taverna.mobile.fragments.workflowdetails.RunFragment;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.view.Menu;
import android.view.MenuItem;
public class RunResult extends ActionBarActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_run_result);
if (savedInstanceState == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.container, RunFragment.newInstance())
.commit();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
return false;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
this.finish();
return super.onOptionsItemSelected(item);
}
}
## Instruction:
Use AppCompat instead of ActionBar
## Code After:
package org.apache.taverna.mobile.activities;
import org.apache.taverna.mobile.R;
import org.apache.taverna.mobile.fragments.workflowdetails.RunFragment;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
public class RunResult extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_run_result);
if (savedInstanceState == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.container, RunFragment.newInstance())
.commit();
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
return false;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
this.finish();
return super.onOptionsItemSelected(item);
}
}
|
# ... existing code ...
import org.apache.taverna.mobile.fragments.workflowdetails.RunFragment;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
public class RunResult extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
# ... rest of the code ...
|
e318716fdaeda8fdabe06daf644178a43bc7400e
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='nanomon',
version='1.0',
author='Michael Barrett',
author_email='[email protected]',
description='The Nano Monitoring System',
packages=find_packages(),
)
|
from setuptools import setup, find_packages
setup(
name='nymms',
version='0.1.0',
author='Michael Barrett',
author_email='[email protected]',
license="New BSD license",
description='Not Your Mother\'s Monitoring System (NYMMS)',
packages=find_packages(),
)
|
Change package name, add license
|
Change package name, add license
|
Python
|
bsd-2-clause
|
cloudtools/nymms
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(name='nanomon',
version='1.0',
author='Michael Barrett',
author_email='[email protected]',
description='The Nano Monitoring System',
packages=find_packages(),
)
## Instruction:
Change package name, add license
## Code After:
from setuptools import setup, find_packages
setup(
name='nymms',
version='0.1.0',
author='Michael Barrett',
author_email='[email protected]',
license="New BSD license",
description='Not Your Mother\'s Monitoring System (NYMMS)',
packages=find_packages(),
)
|
# ... existing code ...
from setuptools import setup, find_packages
setup(
name='nymms',
version='0.1.0',
author='Michael Barrett',
author_email='[email protected]',
license="New BSD license",
description='Not Your Mother\'s Monitoring System (NYMMS)',
packages=find_packages(),
)
# ... rest of the code ...
|
153688b63103a024b126a7c92eb9d0816500d2dc
|
ircstat/ent.py
|
ircstat/ent.py
|
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
|
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
|
Update Struct.__repr__ to show subclass names
|
Update Struct.__repr__ to show subclass names
|
Python
|
mit
|
jreese/ircstat,jreese/ircstat
|
python
|
## Code Before:
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<Struct %s>' % self.__dict__
## Instruction:
Update Struct.__repr__ to show subclass names
## Code After:
class Struct(object):
"""A basic object type that, given a dictionary or keyword arguments,
converts the key/value pairs into object attributes."""
def __init__(self, data=None, **kwargs):
if data is not None:
self.__dict__.update(data)
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
|
// ... existing code ...
self.__dict__.update(kwargs)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.__dict__)
// ... rest of the code ...
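The behavioural difference in one short run, given the Struct class above; Channel is a hypothetical stand-in for ircstat's own subclasses:
class Channel(Struct):
    pass
s = Struct(nick='jreese')
c = Channel(name='#python')
print(repr(s))  # <Struct {'nick': 'jreese'}>
print(repr(c))  # <Channel {'name': '#python'}> -- previously this also printed "<Struct ...>"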
|
081dcb1a6f3531249f8948b019d8fdc4175dbe61
|
makerscience_profile/api.py
|
makerscience_profile/api.py
|
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
return bundle
|
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
|
Add fullname in REST response
|
Add fullname in REST response
|
Python
|
agpl-3.0
|
atiberghien/makerscience-server,atiberghien/makerscience-server
|
python
|
## Code Before:
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
return bundle
## Instruction:
Add fullname in REST response
## Code After:
from .models import MakerScienceProfile
from tastypie.resources import ModelResource
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.constants import ALL_WITH_RELATIONS
from dataserver.authentication import AnonymousApiKeyAuthentication
from accounts.api import ProfileResource
from scout.api import PostalAddressResource
class MakerScienceProfileResource(ModelResource):
parent = fields.OneToOneField(ProfileResource, 'parent')
location = fields.ToOneField(PostalAddressResource, 'location', null=True, blank=True, full=True)
class Meta:
queryset = MakerScienceProfile.objects.all()
allowed_methods = ['get', 'post', 'put', 'patch']
resource_name = 'makerscience/profile'
authentication = AnonymousApiKeyAuthentication()
authorization = DjangoAuthorization()
always_return_data = True
filtering = {
'parent' : ALL_WITH_RELATIONS,
}
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
|
// ... existing code ...
def dehydrate(self, bundle):
bundle.data["first_name"] = bundle.obj.parent.user.first_name
bundle.data["last_name"] = bundle.obj.parent.user.last_name
bundle.data["full_name"] = "%s %s" % (bundle.obj.parent.user.first_name, bundle.obj.parent.user.last_name)
return bundle
// ... rest of the code ...
|
5558b19b46fbe1db6f25b227ac581095fedcff2e
|
us_ignite/maps/views.py
|
us_ignite/maps/views.py
|
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.template.response import TemplateResponse
from us_ignite.maps.models import Location
def location_list(request):
"""Shows a list of locations in a map."""
object_list = Location.published.select_related('category').all()
context = {
'object_list': object_list,
}
return TemplateResponse(request, 'maps/object_list.html', context)
def _get_content(name, website):
if not website:
return name
return u'<div><h2><a href="%s">%s</a></h2></div>' % (website, name)
def _get_location_data(location):
return {
'latitude': location.position.latitude,
'longitude': location.position.longitude,
'name': location.name,
'website': location.website,
'category': location.category.name,
'image': location.get_image_url(),
'content': _get_content(location.name, location.website),
}
def location_list_json(request):
"""Returns the locations in JSON format"""
object_list = Location.published.select_related('category').all()
dict_list = [_get_location_data(l) for l in object_list]
response = 'map.render(%s)' % json.dumps(dict_list, cls=DjangoJSONEncoder)
return HttpResponse(response, content_type='application/javascript')
|
from django.template.response import TemplateResponse
from us_ignite.common.response import json_response
from us_ignite.maps.models import Location
def location_list(request):
"""Shows a list of locations in a map."""
object_list = Location.published.select_related('category').all()
context = {
'object_list': object_list,
}
return TemplateResponse(request, 'maps/object_list.html', context)
def _get_content(name, website):
if not website:
return name
return u'<div><h2><a href="%s">%s</a></h2></div>' % (website, name)
def _get_location_data(location):
return {
'latitude': location.position.latitude,
'longitude': location.position.longitude,
'name': location.name,
'website': location.website,
'category': location.category.name,
'image': location.get_image_url(),
'content': _get_content(location.name, location.website),
}
def location_list_json(request):
"""Returns the locations in JSON format"""
object_list = Location.published.select_related('category').all()
dict_list = [_get_location_data(l) for l in object_list]
return json_response(dict_list, callback='map.render')
|
Update ``maps`` to use the ``json_response`` function.
|
Update ``maps`` to use the ``json_response`` function.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
python
|
## Code Before:
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.template.response import TemplateResponse
from us_ignite.maps.models import Location
def location_list(request):
"""Shows a list of locations in a map."""
object_list = Location.published.select_related('category').all()
context = {
'object_list': object_list,
}
return TemplateResponse(request, 'maps/object_list.html', context)
def _get_content(name, website):
if not website:
return name
return u'<div><h2><a href="%s">%s</a></h2></div>' % (website, name)
def _get_location_data(location):
return {
'latitude': location.position.latitude,
'longitude': location.position.longitude,
'name': location.name,
'website': location.website,
'category': location.category.name,
'image': location.get_image_url(),
'content': _get_content(location.name, location.website),
}
def location_list_json(request):
"""Returns the locations in JSON format"""
object_list = Location.published.select_related('category').all()
dict_list = [_get_location_data(l) for l in object_list]
response = 'map.render(%s)' % json.dumps(dict_list, cls=DjangoJSONEncoder)
return HttpResponse(response, content_type='application/javascript')
## Instruction:
Update ``maps`` to use the ``json_response`` function.
## Code After:
from django.template.response import TemplateResponse
from us_ignite.common.response import json_response
from us_ignite.maps.models import Location
def location_list(request):
"""Shows a list of locations in a map."""
object_list = Location.published.select_related('category').all()
context = {
'object_list': object_list,
}
return TemplateResponse(request, 'maps/object_list.html', context)
def _get_content(name, website):
if not website:
return name
return u'<div><h2><a href="%s">%s</a></h2></div>' % (website, name)
def _get_location_data(location):
return {
'latitude': location.position.latitude,
'longitude': location.position.longitude,
'name': location.name,
'website': location.website,
'category': location.category.name,
'image': location.get_image_url(),
'content': _get_content(location.name, location.website),
}
def location_list_json(request):
"""Returns the locations in JSON format"""
object_list = Location.published.select_related('category').all()
dict_list = [_get_location_data(l) for l in object_list]
return json_response(dict_list, callback='map.render')
|
# ... existing code ...
from django.template.response import TemplateResponse
from us_ignite.common.response import json_response
from us_ignite.maps.models import Location
# ... modified code ...
"""Returns the locations in JSON format"""
object_list = Location.published.select_related('category').all()
dict_list = [_get_location_data(l) for l in object_list]
return json_response(dict_list, callback='map.render')
# ... rest of the code ...
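us_ignite.common.response is not included in this record, so the helper below is only a plausible sketch inferred from the behaviour being replaced (DjangoJSONEncoder serialisation plus optional JSONP wrapping); the project's real implementation may differ.
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
def json_response(data, callback=None):
    payload = json.dumps(data, cls=DjangoJSONEncoder)
    if callback:
        # JSONP-style wrapping, matching the old 'map.render(%s)' output.
        return HttpResponse('%s(%s)' % (callback, payload), content_type='application/javascript')
    return HttpResponse(payload, content_type='application/json')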
|
fc7577a0dfaefd763430cd42476b5d09ce1ef394
|
msgpack/msgpack.h
|
msgpack/msgpack.h
|
//
// msgpack.h
// msgpack
//
// Created by Ricardo Pereira on 13/10/2017.
//
//
#import <UIKit/UIKit.h>
//! Project version number for msgpack.
FOUNDATION_EXPORT double msgpackVersionNumber;
//! Project version string for msgpack.
FOUNDATION_EXPORT const unsigned char msgpackVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <msgpack/PublicHeader.h>
#import "MessagePack.h"
|
//
// msgpack.h
// msgpack
//
// Created by Ricardo Pereira on 13/10/2017.
//
//
#import <Foundation/Foundation.h>
//! Project version number for msgpack.
FOUNDATION_EXPORT double msgpackVersionNumber;
//! Project version string for msgpack.
FOUNDATION_EXPORT const unsigned char msgpackVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <msgpack/PublicHeader.h>
#import "MessagePack.h"
|
Remove UIKit usage by replacing it with Foundation
|
Remove UIKit usage by replacing it with Foundation
|
C
|
apache-2.0
|
rvi/msgpack-objective-C,rvi/msgpack-objective-C,rvi/msgpack-objective-C
|
c
|
## Code Before:
//
// msgpack.h
// msgpack
//
// Created by Ricardo Pereira on 13/10/2017.
//
//
#import <UIKit/UIKit.h>
//! Project version number for msgpack.
FOUNDATION_EXPORT double msgpackVersionNumber;
//! Project version string for msgpack.
FOUNDATION_EXPORT const unsigned char msgpackVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <msgpack/PublicHeader.h>
#import "MessagePack.h"
## Instruction:
Remove UIKit usage by replacing it with Foundation
## Code After:
//
// msgpack.h
// msgpack
//
// Created by Ricardo Pereira on 13/10/2017.
//
//
#import <Foundation/Foundation.h>
//! Project version number for msgpack.
FOUNDATION_EXPORT double msgpackVersionNumber;
//! Project version string for msgpack.
FOUNDATION_EXPORT const unsigned char msgpackVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <msgpack/PublicHeader.h>
#import "MessagePack.h"
|
# ... existing code ...
//
//
#import <Foundation/Foundation.h>
//! Project version number for msgpack.
FOUNDATION_EXPORT double msgpackVersionNumber;
# ... rest of the code ...
|
b84584178055da356c7af4bcfd02e2738310f785
|
kotlin-eclipse-core/src/org/jetbrains/kotlin/core/log/KotlinLogger.java
|
kotlin-eclipse-core/src/org/jetbrains/kotlin/core/log/KotlinLogger.java
|
package org.jetbrains.kotlin.core.log;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.jetbrains.kotlin.core.Activator;
public class KotlinLogger {
public static void log(IStatus status) {
Activator.getDefault().getLog().log(status);
}
public static void log(int severity, String message, Throwable exception) {
log(new Status(severity, Activator.PLUGIN_ID, message, exception));
}
public static void logError(Throwable exception) {
log(IStatus.ERROR, "Unexpected Exception", exception);
}
public static void logError(String message, Throwable exception) {
log(IStatus.ERROR, message, exception);
}
public static void logInfo(String message) {
log(IStatus.INFO, message, null);
}
}
|
package org.jetbrains.kotlin.core.log;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.jetbrains.kotlin.core.Activator;
public class KotlinLogger {
public static void log(IStatus status) {
Activator.getDefault().getLog().log(status);
}
public static void log(int severity, String message, Throwable exception) {
log(new Status(severity, Activator.PLUGIN_ID, message, exception));
}
public static void logError(Throwable exception) {
log(IStatus.ERROR, "Unexpected Exception", exception);
}
public static void logError(String message, Throwable exception) {
log(IStatus.ERROR, message, exception);
}
public static void logInfo(String message) {
log(IStatus.INFO, message, null);
}
public static void logAndThrow(Throwable exception) {
logError(exception);
throw new RuntimeException(exception);
}
}
|
Add method to log and throw runtime exception
|
Add method to log and throw runtime exception
|
Java
|
apache-2.0
|
noemus/kotlin-eclipse,noemus/kotlin-eclipse
|
java
|
## Code Before:
package org.jetbrains.kotlin.core.log;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.jetbrains.kotlin.core.Activator;
public class KotlinLogger {
public static void log(IStatus status) {
Activator.getDefault().getLog().log(status);
}
public static void log(int severity, String message, Throwable exception) {
log(new Status(severity, Activator.PLUGIN_ID, message, exception));
}
public static void logError(Throwable exception) {
log(IStatus.ERROR, "Unexpected Exception", exception);
}
public static void logError(String message, Throwable exception) {
log(IStatus.ERROR, message, exception);
}
public static void logInfo(String message) {
log(IStatus.INFO, message, null);
}
}
## Instruction:
Add method to log and throw runtime exception
## Code After:
package org.jetbrains.kotlin.core.log;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.jetbrains.kotlin.core.Activator;
public class KotlinLogger {
public static void log(IStatus status) {
Activator.getDefault().getLog().log(status);
}
public static void log(int severity, String message, Throwable exception) {
log(new Status(severity, Activator.PLUGIN_ID, message, exception));
}
public static void logError(Throwable exception) {
log(IStatus.ERROR, "Unexpected Exception", exception);
}
public static void logError(String message, Throwable exception) {
log(IStatus.ERROR, message, exception);
}
public static void logInfo(String message) {
log(IStatus.INFO, message, null);
}
public static void logAndThrow(Throwable exception) {
logError(exception);
throw new RuntimeException(exception);
}
}
|
// ... existing code ...
public static void logInfo(String message) {
log(IStatus.INFO, message, null);
}
public static void logAndThrow(Throwable exception) {
logError(exception);
throw new RuntimeException(exception);
}
}
// ... rest of the code ...
|
bd99ff7a3ec80cb935d4e80964805bf522f0bf61
|
src/main/java/org/realityforge/replicant/server/ee/rest/TokenRestService.java
|
src/main/java/org/realityforge/replicant/server/ee/rest/TokenRestService.java
|
package org.realityforge.replicant.server.ee.rest;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.realityforge.replicant.shared.transport.ReplicantContext;
import org.realityforge.ssf.SessionManager;
/**
* The token source is for generating the initial token.
*
* It is expected that this endpoint has already had security applied.
*/
@Path( ReplicantContext.TOKEN_URL_FRAGMENT )
@Produces( MediaType.TEXT_PLAIN )
public class TokenRestService
{
@Inject
private SessionManager _sessionManager;
@GET
public String generateToken()
{
return _sessionManager.createSession().getSessionID();
}
}
|
package org.realityforge.replicant.server.ee.rest;
import javax.ejb.EJB;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.realityforge.replicant.shared.transport.ReplicantContext;
import org.realityforge.ssf.SessionManager;
/**
* The token source is for generating the initial token.
*
* It is expected that this endpoint has already had security applied.
*/
@Path( ReplicantContext.TOKEN_URL_FRAGMENT )
@Produces( MediaType.TEXT_PLAIN )
public class TokenRestService
{
@EJB
private SessionManager _sessionManager;
@GET
public String generateToken()
{
return _sessionManager.createSession().getSessionID();
}
}
|
Use an @EJB annotation to comply with Domgen generated code
|
Use an @EJB annotation to comply with Domgen generated code
|
Java
|
apache-2.0
|
realityforge/replicant,realityforge/replicant
|
java
|
## Code Before:
package org.realityforge.replicant.server.ee.rest;
import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.realityforge.replicant.shared.transport.ReplicantContext;
import org.realityforge.ssf.SessionManager;
/**
* The token source is for generating the initial token.
*
* It is expected that this endpoint has already had security applied.
*/
@Path( ReplicantContext.TOKEN_URL_FRAGMENT )
@Produces( MediaType.TEXT_PLAIN )
public class TokenRestService
{
@Inject
private SessionManager _sessionManager;
@GET
public String generateToken()
{
return _sessionManager.createSession().getSessionID();
}
}
## Instruction:
Use an @EJB annotation to comply with Domgen generated code
## Code After:
package org.realityforge.replicant.server.ee.rest;
import javax.ejb.EJB;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.realityforge.replicant.shared.transport.ReplicantContext;
import org.realityforge.ssf.SessionManager;
/**
* The token source is for generating the initial token.
*
* It is expected that this endpoint has already had security applied.
*/
@Path( ReplicantContext.TOKEN_URL_FRAGMENT )
@Produces( MediaType.TEXT_PLAIN )
public class TokenRestService
{
@EJB
private SessionManager _sessionManager;
@GET
public String generateToken()
{
return _sessionManager.createSession().getSessionID();
}
}
|
...
package org.realityforge.replicant.server.ee.rest;
import javax.ejb.EJB;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
...
@Produces( MediaType.TEXT_PLAIN )
public class TokenRestService
{
@EJB
private SessionManager _sessionManager;
@GET
...
|
864631113fa8421f6c1042cee105923edc59257b
|
lib/com/vaadin/polymer/elemental/NodeList.java
|
lib/com/vaadin/polymer/elemental/NodeList.java
|
package com.vaadin.polymer.elemental;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
@JsType
public interface NodeList<T> {
@JsProperty
int getLength();
T item(int index);
}
|
package com.vaadin.polymer.elemental;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
@JsType
public interface NodeList {
@JsProperty
int getLength();
<T extends Node> T item(int index);
}
|
Move generic from class to method
|
Move generic from class to method
|
Java
|
apache-2.0
|
vaadin/gwt-api-generator,florian-f/gwt-api-generator,manolo/gwt-api-generator,manolo/gwt-api-generator,vaadin/gwt-api-generator,florian-f/gwt-api-generator
|
java
|
## Code Before:
package com.vaadin.polymer.elemental;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
@JsType
public interface NodeList<T> {
@JsProperty
int getLength();
T item(int index);
}
## Instruction:
Move generic from class to method
## Code After:
package com.vaadin.polymer.elemental;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
@JsType
public interface NodeList {
@JsProperty
int getLength();
<T extends Node> T item(int index);
}
|
// ... existing code ...
import com.google.gwt.core.client.js.JsType;
@JsType
public interface NodeList {
@JsProperty
int getLength();
<T extends Node> T item(int index);
}
// ... rest of the code ...
|
061ab90a735630bacf1c5894ac17033a8fe021c0
|
Drinks/src/main/java/fr/masciulli/drinks/activity/DrinkDetailActivity.java
|
Drinks/src/main/java/fr/masciulli/drinks/activity/DrinkDetailActivity.java
|
package fr.masciulli.drinks.activity;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.MenuItem;
import fr.masciulli.drinks.R;
import fr.masciulli.drinks.fragment.DrinkDetailFragment;
public class DrinkDetailActivity extends FragmentActivity {
private DrinkDetailFragment mDetailFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_drink_detail);
if (savedInstanceState == null) {
mDetailFragment = new DrinkDetailFragment();
getSupportFragmentManager().beginTransaction()
.add(R.id.drink_detail_container, new DrinkDetailFragment())
.commit();
}
getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public void finish() {
super.finish();
// override transitions to skip the standard window animations
overridePendingTransition(0, 0);
}
@Override
public void onBackPressed() {
mDetailFragment.onBackPressed();
}
}
|
package fr.masciulli.drinks.activity;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.MenuItem;
import fr.masciulli.drinks.R;
import fr.masciulli.drinks.fragment.DrinkDetailFragment;
public class DrinkDetailActivity extends FragmentActivity {
private DrinkDetailFragment mDetailFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_drink_detail);
if (savedInstanceState == null) {
mDetailFragment = new DrinkDetailFragment();
getSupportFragmentManager().beginTransaction()
.add(R.id.drink_detail_container, mDetailFragment)
.commit();
}
getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public void finish() {
super.finish();
// override transitions to skip the standard window animations
overridePendingTransition(0, 0);
}
@Override
public void onBackPressed() {
mDetailFragment.onBackPressed();
}
}
|
Fix bug at fragment creation
|
Fix bug at fragment creation
|
Java
|
apache-2.0
|
amasciul/Drinks,mechdome/Drinks,mechdome/Drinks,vbarthel-fr/Drinks,amasciul/Drinks,vbarthel-fr/Drinks,amasciul/Drinks
|
java
|
## Code Before:
package fr.masciulli.drinks.activity;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.MenuItem;
import fr.masciulli.drinks.R;
import fr.masciulli.drinks.fragment.DrinkDetailFragment;
public class DrinkDetailActivity extends FragmentActivity {
private DrinkDetailFragment mDetailFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_drink_detail);
if (savedInstanceState == null) {
mDetailFragment = new DrinkDetailFragment();
getSupportFragmentManager().beginTransaction()
.add(R.id.drink_detail_container, new DrinkDetailFragment())
.commit();
}
getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public void finish() {
super.finish();
// override transitions to skip the standard window animations
overridePendingTransition(0, 0);
}
@Override
public void onBackPressed() {
mDetailFragment.onBackPressed();
}
}
## Instruction:
Fix bug at fragment creation
## Code After:
package fr.masciulli.drinks.activity;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.view.MenuItem;
import fr.masciulli.drinks.R;
import fr.masciulli.drinks.fragment.DrinkDetailFragment;
public class DrinkDetailActivity extends FragmentActivity {
private DrinkDetailFragment mDetailFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_drink_detail);
if (savedInstanceState == null) {
mDetailFragment = new DrinkDetailFragment();
getSupportFragmentManager().beginTransaction()
.add(R.id.drink_detail_container, mDetailFragment)
.commit();
}
getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public void finish() {
super.finish();
// override transitions to skip the standard window animations
overridePendingTransition(0, 0);
}
@Override
public void onBackPressed() {
mDetailFragment.onBackPressed();
}
}
|
...
if (savedInstanceState == null) {
mDetailFragment = new DrinkDetailFragment();
getSupportFragmentManager().beginTransaction()
.add(R.id.drink_detail_container, mDetailFragment)
.commit();
}
...
|
6fa751accb736b3c32522ca498210ffeebfef650
|
pytablereader/tsv/core.py
|
pytablereader/tsv/core.py
|
from .._validator import FileValidator, TextValidator
from ..csv.core import CsvTableFileLoader, CsvTableTextLoader
class TsvTableFileLoader(CsvTableFileLoader):
"""
Tab separated values (TSV) format file loader class.
:param str file_path: Path to the loading TSV file.
.. py:attribute:: table_name
Table name string. Defaults to ``%(filename)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, file_path):
super().__init__(file_path)
self.delimiter = "\t"
self._validator = FileValidator(file_path)
class TsvTableTextLoader(CsvTableTextLoader):
"""
Tab separated values (TSV) format text loader class.
:param str text: TSV text to load.
.. py:attribute:: table_name
Table name string. Defaults to ``%(format_name)s%(format_id)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, text):
super().__init__(text)
self.delimiter = "\t"
self._validator = TextValidator(text)
|
from .._validator import FileValidator, TextValidator
from ..csv.core import CsvTableFileLoader, CsvTableTextLoader
class TsvTableFileLoader(CsvTableFileLoader):
"""
Tab separated values (TSV) format file loader class.
:param str file_path: Path to the loading TSV file.
.. py:attribute:: table_name
Table name string. Defaults to ``%(filename)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, file_path, quoting_flags=None, type_hints=None, type_hint_rules=None):
super().__init__(file_path, quoting_flags, type_hints, type_hint_rules)
self.delimiter = "\t"
self._validator = FileValidator(file_path)
class TsvTableTextLoader(CsvTableTextLoader):
"""
Tab separated values (TSV) format text loader class.
:param str text: TSV text to load.
.. py:attribute:: table_name
Table name string. Defaults to ``%(format_name)s%(format_id)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, text, quoting_flags=None, type_hints=None, type_hint_rules=None):
super().__init__(text, quoting_flags, type_hints, type_hint_rules)
self.delimiter = "\t"
self._validator = TextValidator(text)
|
Modify TsvTableFileLoader/TsvTableTextLoader to accept additional keyword arguments
|
Modify TsvTableFileLoader/TsvTableTextLoader to accept additional keyword arguments
|
Python
|
mit
|
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
|
python
|
## Code Before:
from .._validator import FileValidator, TextValidator
from ..csv.core import CsvTableFileLoader, CsvTableTextLoader
class TsvTableFileLoader(CsvTableFileLoader):
"""
Tab separated values (TSV) format file loader class.
:param str file_path: Path to the loading TSV file.
.. py:attribute:: table_name
Table name string. Defaults to ``%(filename)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, file_path):
super().__init__(file_path)
self.delimiter = "\t"
self._validator = FileValidator(file_path)
class TsvTableTextLoader(CsvTableTextLoader):
"""
Tab separated values (TSV) format text loader class.
:param str text: TSV text to load.
.. py:attribute:: table_name
Table name string. Defaults to ``%(format_name)s%(format_id)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, text):
super().__init__(text)
self.delimiter = "\t"
self._validator = TextValidator(text)
## Instruction:
Modify TsvTableFileLoader/TsvTableTextLoader to accept additional keyword arguments
## Code After:
from .._validator import FileValidator, TextValidator
from ..csv.core import CsvTableFileLoader, CsvTableTextLoader
class TsvTableFileLoader(CsvTableFileLoader):
"""
Tab separated values (TSV) format file loader class.
:param str file_path: Path to the loading TSV file.
.. py:attribute:: table_name
Table name string. Defaults to ``%(filename)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, file_path, quoting_flags=None, type_hints=None, type_hint_rules=None):
super().__init__(file_path, quoting_flags, type_hints, type_hint_rules)
self.delimiter = "\t"
self._validator = FileValidator(file_path)
class TsvTableTextLoader(CsvTableTextLoader):
"""
Tab separated values (TSV) format text loader class.
:param str text: TSV text to load.
.. py:attribute:: table_name
Table name string. Defaults to ``%(format_name)s%(format_id)s``.
"""
@property
def format_name(self):
return "tsv"
def __init__(self, text, quoting_flags=None, type_hints=None, type_hint_rules=None):
super().__init__(text, quoting_flags, type_hints, type_hint_rules)
self.delimiter = "\t"
self._validator = TextValidator(text)
|
...
def format_name(self):
return "tsv"
def __init__(self, file_path, quoting_flags=None, type_hints=None, type_hint_rules=None):
super().__init__(file_path, quoting_flags, type_hints, type_hint_rules)
self.delimiter = "\t"
...
def format_name(self):
return "tsv"
def __init__(self, text, quoting_flags=None, type_hints=None, type_hint_rules=None):
super().__init__(text, quoting_flags, type_hints, type_hint_rules)
self.delimiter = "\t"
...
|
1e437be581a3d2e1176a66f4e2420ce7f37ead37
|
TRD/AliTRDPreprocessor.h
|
TRD/AliTRDPreprocessor.h
|
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
////////////////////////////////////////////////////////////////////////////
// //
// TRD preprocessor for the database SHUTTLE //
// //
////////////////////////////////////////////////////////////////////////////
#include "AliPreprocessor.h"
class AliTRDPreprocessor : public AliPreprocessor
{
public:
AliTRDPreprocessor(AliShuttleInterface *shuttle);
virtual ~AliTRDPreprocessor();
protected:
virtual void Initialize(Int_t run, UInt_t startTime, UInt_t endTime);
virtual UInt_t Process(TMap* /*dcsAliasMap*/);
private:
ClassDef(AliTRDPreprocessor,0);
};
#endif
|
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
////////////////////////////////////////////////////////////////////////////
// //
// TRD preprocessor for the database SHUTTLE //
// //
////////////////////////////////////////////////////////////////////////////
#include "AliPreprocessor.h"
class AliTRDPreprocessor : public AliPreprocessor
{
public:
AliTRDPreprocessor(AliShuttleInterface *shuttle);
virtual ~AliTRDPreprocessor();
protected:
virtual void Initialize(Int_t run, UInt_t startTime, UInt_t endTime);
virtual UInt_t Process(TMap* /*dcsAliasMap*/);
private:
ClassDef(AliTRDPreprocessor,0) // The SHUTTLE preprocessor for TRD
};
#endif
|
Add a comment behind ClassDef
|
Add a comment behind ClassDef
|
C
|
bsd-3-clause
|
ecalvovi/AliRoot,alisw/AliRoot,ecalvovi/AliRoot,miranov25/AliRoot,sebaleh/AliRoot,mkrzewic/AliRoot,jgrosseo/AliRoot,jgrosseo/AliRoot,sebaleh/AliRoot,coppedis/AliRoot,coppedis/AliRoot,ecalvovi/AliRoot,jgrosseo/AliRoot,ecalvovi/AliRoot,ALICEHLT/AliRoot,sebaleh/AliRoot,coppedis/AliRoot,miranov25/AliRoot,shahor02/AliRoot,alisw/AliRoot,shahor02/AliRoot,shahor02/AliRoot,coppedis/AliRoot,alisw/AliRoot,ALICEHLT/AliRoot,coppedis/AliRoot,shahor02/AliRoot,ALICEHLT/AliRoot,coppedis/AliRoot,miranov25/AliRoot,shahor02/AliRoot,shahor02/AliRoot,ALICEHLT/AliRoot,ecalvovi/AliRoot,mkrzewic/AliRoot,miranov25/AliRoot,alisw/AliRoot,jgrosseo/AliRoot,shahor02/AliRoot,ALICEHLT/AliRoot,ecalvovi/AliRoot,alisw/AliRoot,mkrzewic/AliRoot,miranov25/AliRoot,sebaleh/AliRoot,miranov25/AliRoot,jgrosseo/AliRoot,coppedis/AliRoot,miranov25/AliRoot,jgrosseo/AliRoot,coppedis/AliRoot,mkrzewic/AliRoot,alisw/AliRoot,alisw/AliRoot,ALICEHLT/AliRoot,mkrzewic/AliRoot,ecalvovi/AliRoot,sebaleh/AliRoot,mkrzewic/AliRoot,sebaleh/AliRoot,ALICEHLT/AliRoot,miranov25/AliRoot,sebaleh/AliRoot,jgrosseo/AliRoot,alisw/AliRoot,mkrzewic/AliRoot
|
c
|
## Code Before:
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
////////////////////////////////////////////////////////////////////////////
// //
// TRD preprocessor for the database SHUTTLE //
// //
////////////////////////////////////////////////////////////////////////////
#include "AliPreprocessor.h"
class AliTRDPreprocessor : public AliPreprocessor
{
public:
AliTRDPreprocessor(AliShuttleInterface *shuttle);
virtual ~AliTRDPreprocessor();
protected:
virtual void Initialize(Int_t run, UInt_t startTime, UInt_t endTime);
virtual UInt_t Process(TMap* /*dcsAliasMap*/);
private:
ClassDef(AliTRDPreprocessor,0);
};
#endif
## Instruction:
Add a comment behind ClassDef
## Code After:
/* Copyright(c) 1998-1999, ALICE Experiment at CERN, All rights reserved. *
* See cxx source for full Copyright notice */
/* $Id$ */
////////////////////////////////////////////////////////////////////////////
// //
// TRD preprocessor for the database SHUTTLE //
// //
////////////////////////////////////////////////////////////////////////////
#include "AliPreprocessor.h"
class AliTRDPreprocessor : public AliPreprocessor
{
public:
AliTRDPreprocessor(AliShuttleInterface *shuttle);
virtual ~AliTRDPreprocessor();
protected:
virtual void Initialize(Int_t run, UInt_t startTime, UInt_t endTime);
virtual UInt_t Process(TMap* /*dcsAliasMap*/);
private:
ClassDef(AliTRDPreprocessor,0) // The SHUTTLE preprocessor for TRD
};
#endif
|
...
private:
ClassDef(AliTRDPreprocessor,0) // The SHUTTLE preprocessor for TRD
};
...
|
433c39cd723992765802a36a4b77fbff6b7600ec
|
src/free/java/com/antew/redditinpictures/ApplicationModuleFree.java
|
src/free/java/com/antew/redditinpictures/ApplicationModuleFree.java
|
package com.antew.redditinpictures;
import com.antew.redditinpictures.library.RedditInPicturesApplication;
import com.antew.redditinpictures.ui.ImageDetailActivityFree;
import com.antew.redditinpictures.ui.ImageGridActivityFree;
import com.antew.redditinpictures.ui.ImageGridFragmentFree;
import com.antew.redditinpictures.ui.ImageListFragmentFree;
import com.antew.redditinpictures.ui.ImgurAlbumActivityFree;
import dagger.Module;
/**
* Dagger module for setting up provides statements.
* Register all of your entry points below.
*/
@Module
(
complete = false,
overrides = true,
injects = {
ImageGridFragmentFree.class,
ImageListFragmentFree.class,
ImageGridActivityFree.class,
ImageDetailActivityFree.class,
ImgurAlbumActivityFree.class
}, library = true
)
public class ApplicationModuleFree {
}
|
package com.antew.redditinpictures;
import com.antew.redditinpictures.library.RedditInPicturesApplication;
import com.antew.redditinpictures.library.ui.ImageDetailFragment;
import com.antew.redditinpictures.ui.ImageDetailActivityFree;
import com.antew.redditinpictures.ui.ImageGridActivityFree;
import com.antew.redditinpictures.ui.ImageGridFragmentFree;
import com.antew.redditinpictures.ui.ImageListFragmentFree;
import com.antew.redditinpictures.ui.ImgurAlbumActivityFree;
import dagger.Module;
/**
* Dagger module for setting up provides statements.
* Register all of your entry points below.
*/
@Module
(
complete = false,
overrides = true,
injects = {
ImageGridFragmentFree.class,
ImageListFragmentFree.class,
ImageGridActivityFree.class,
ImageDetailActivityFree.class,
ImageDetailFragment.class,
ImgurAlbumActivityFree.class
}, library = true
)
public class ApplicationModuleFree {
}
|
Add missing ImageDetailFragment declaration for dependency injection.
|
Add missing ImageDetailFragment declaration for dependency injection.
|
Java
|
apache-2.0
|
antew/RedditInPictures
|
java
|
## Code Before:
package com.antew.redditinpictures;
import com.antew.redditinpictures.library.RedditInPicturesApplication;
import com.antew.redditinpictures.ui.ImageDetailActivityFree;
import com.antew.redditinpictures.ui.ImageGridActivityFree;
import com.antew.redditinpictures.ui.ImageGridFragmentFree;
import com.antew.redditinpictures.ui.ImageListFragmentFree;
import com.antew.redditinpictures.ui.ImgurAlbumActivityFree;
import dagger.Module;
/**
* Dagger module for setting up provides statements.
* Register all of your entry points below.
*/
@Module
(
complete = false,
overrides = true,
injects = {
ImageGridFragmentFree.class,
ImageListFragmentFree.class,
ImageGridActivityFree.class,
ImageDetailActivityFree.class,
ImgurAlbumActivityFree.class
}, library = true
)
public class ApplicationModuleFree {
}
## Instruction:
Add missing ImageDetailFragment declaration for dependency injection.
## Code After:
package com.antew.redditinpictures;
import com.antew.redditinpictures.library.RedditInPicturesApplication;
import com.antew.redditinpictures.library.ui.ImageDetailFragment;
import com.antew.redditinpictures.ui.ImageDetailActivityFree;
import com.antew.redditinpictures.ui.ImageGridActivityFree;
import com.antew.redditinpictures.ui.ImageGridFragmentFree;
import com.antew.redditinpictures.ui.ImageListFragmentFree;
import com.antew.redditinpictures.ui.ImgurAlbumActivityFree;
import dagger.Module;
/**
* Dagger module for setting up provides statements.
* Register all of your entry points below.
*/
@Module
(
complete = false,
overrides = true,
injects = {
ImageGridFragmentFree.class,
ImageListFragmentFree.class,
ImageGridActivityFree.class,
ImageDetailActivityFree.class,
ImageDetailFragment.class,
ImgurAlbumActivityFree.class
}, library = true
)
public class ApplicationModuleFree {
}
|
// ... existing code ...
package com.antew.redditinpictures;
import com.antew.redditinpictures.library.RedditInPicturesApplication;
import com.antew.redditinpictures.library.ui.ImageDetailFragment;
import com.antew.redditinpictures.ui.ImageDetailActivityFree;
import com.antew.redditinpictures.ui.ImageGridActivityFree;
import com.antew.redditinpictures.ui.ImageGridFragmentFree;
// ... modified code ...
ImageListFragmentFree.class,
ImageGridActivityFree.class,
ImageDetailActivityFree.class,
ImageDetailFragment.class,
ImgurAlbumActivityFree.class
}, library = true
)
// ... rest of the code ...
|
8c31d1261589283f1b4690283e840059e6068c36
|
thirtyinch-kotlin/src/test/java/net/grandcentrix/thirtyinch/kotlin/TiPresenterTest.kt
|
thirtyinch-kotlin/src/test/java/net/grandcentrix/thirtyinch/kotlin/TiPresenterTest.kt
|
package net.grandcentrix.thirtyinch.kotlin
import com.nhaarman.mockito_kotlin.mock
import com.nhaarman.mockito_kotlin.verify
import net.grandcentrix.thirtyinch.TiPresenter
import net.grandcentrix.thirtyinch.TiView
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(JUnit4::class)
class TiPresenterTest {
interface View : TiView {
fun aViewMethod()
}
class TestPresenter : TiPresenter<View>()
private val mockView = mock<View>()
@Test
fun `test sendToViewKotlin should view as this and call it`() = with(TestPresenter()) {
val tiTestPresenter = test()
tiTestPresenter.create()
tiTestPresenter.attachView(mockView)
deliverToView { aViewMethod() }
verify(mockView).aViewMethod()
}
}
|
package net.grandcentrix.thirtyinch.kotlin
import com.nhaarman.mockito_kotlin.*
import net.grandcentrix.thirtyinch.TiPresenter
import net.grandcentrix.thirtyinch.TiView
import org.junit.*
import org.junit.runner.*
import org.junit.runners.*
@RunWith(JUnit4::class)
class TiPresenterTest {
interface View : TiView {
fun aViewMethod()
}
class TestPresenter : TiPresenter<View>()
private val mockView = mock<View>()
@Test
fun `test deliverToView should view as this and call it`() = with(TestPresenter()) {
val tiTestPresenter = test()
tiTestPresenter.attachView(mockView)
deliverToView { aViewMethod() }
verify(mockView).aViewMethod()
}
@Test
fun `test deliverToView without attached view`() = with(TestPresenter()) {
val tiTestPresenter = test()
deliverToView { aViewMethod() }
verify(mockView, never()).aViewMethod()
tiTestPresenter.attachView(mockView)
verify(mockView).aViewMethod()
}
}
|
Add test for deliverToView without view
|
Add test for deliverToView without view
|
Kotlin
|
apache-2.0
|
grandcentrix/ThirtyInch,grandcentrix/ThirtyInch
|
kotlin
|
## Code Before:
package net.grandcentrix.thirtyinch.kotlin
import com.nhaarman.mockito_kotlin.mock
import com.nhaarman.mockito_kotlin.verify
import net.grandcentrix.thirtyinch.TiPresenter
import net.grandcentrix.thirtyinch.TiView
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(JUnit4::class)
class TiPresenterTest {
interface View : TiView {
fun aViewMethod()
}
class TestPresenter : TiPresenter<View>()
private val mockView = mock<View>()
@Test
fun `test sendToViewKotlin should view as this and call it`() = with(TestPresenter()) {
val tiTestPresenter = test()
tiTestPresenter.create()
tiTestPresenter.attachView(mockView)
deliverToView { aViewMethod() }
verify(mockView).aViewMethod()
}
}
## Instruction:
Add test for deliverToView without view
## Code After:
package net.grandcentrix.thirtyinch.kotlin
import com.nhaarman.mockito_kotlin.*
import net.grandcentrix.thirtyinch.TiPresenter
import net.grandcentrix.thirtyinch.TiView
import org.junit.*
import org.junit.runner.*
import org.junit.runners.*
@RunWith(JUnit4::class)
class TiPresenterTest {
interface View : TiView {
fun aViewMethod()
}
class TestPresenter : TiPresenter<View>()
private val mockView = mock<View>()
@Test
fun `test deliverToView should view as this and call it`() = with(TestPresenter()) {
val tiTestPresenter = test()
tiTestPresenter.attachView(mockView)
deliverToView { aViewMethod() }
verify(mockView).aViewMethod()
}
@Test
fun `test deliverToView without attached view`() = with(TestPresenter()) {
val tiTestPresenter = test()
deliverToView { aViewMethod() }
verify(mockView, never()).aViewMethod()
tiTestPresenter.attachView(mockView)
verify(mockView).aViewMethod()
}
}
|
// ... existing code ...
package net.grandcentrix.thirtyinch.kotlin
import com.nhaarman.mockito_kotlin.*
import net.grandcentrix.thirtyinch.TiPresenter
import net.grandcentrix.thirtyinch.TiView
import org.junit.*
import org.junit.runner.*
import org.junit.runners.*
@RunWith(JUnit4::class)
class TiPresenterTest {
interface View : TiView {
fun aViewMethod()
}
class TestPresenter : TiPresenter<View>()
// ... modified code ...
private val mockView = mock<View>()
@Test
fun `test deliverToView should view as this and call it`() = with(TestPresenter()) {
val tiTestPresenter = test()
tiTestPresenter.attachView(mockView)
deliverToView { aViewMethod() }
...
verify(mockView).aViewMethod()
}
@Test
fun `test deliverToView without attached view`() = with(TestPresenter()) {
val tiTestPresenter = test()
deliverToView { aViewMethod() }
verify(mockView, never()).aViewMethod()
tiTestPresenter.attachView(mockView)
verify(mockView).aViewMethod()
}
}
// ... rest of the code ...
|
47701ec0cc22db7e87330bdb2a19b864915f729f
|
src/main/java/net/onrc/onos/core/matchaction/match/Ipv4Match.java
|
src/main/java/net/onrc/onos/core/matchaction/match/Ipv4Match.java
|
package net.onrc.onos.core.matchaction.match;
import net.onrc.onos.core.util.IPv4Net;
public class Ipv4Match implements Match {
IPv4Net dstIp;
public Ipv4Match(String ipAddressSlash) {
this.dstIp = new IPv4Net(ipAddressSlash);
}
public IPv4Net getDestination() {
return dstIp;
}
}
|
package net.onrc.onos.core.matchaction.match;
import net.onrc.onos.core.util.IPv4;
import net.onrc.onos.core.util.IPv4Net;
public class Ipv4Match implements Match {
IPv4Net dstIp;
public Ipv4Match(String ipAddressSlash) {
this.dstIp = new IPv4Net(ipAddressSlash);
IPv4 ip = dstIp.address();
short prefLen = dstIp.prefixLen();
int mask = ~((1 << (32 - prefLen)) - 1);
int newIpInt = ip.value() & mask;
IPv4 newIp = new IPv4(newIpInt);
this.dstIp = new IPv4Net(newIp, prefLen);
}
public IPv4Net getDestination() {
return dstIp;
}
}
|
Change IP address mask according to the prefix when setting the IP forwarding rule.
|
Change IP address mask according to the prefix when setting the IP forwarding rule.
Change-Id: Ic0f8fb43913473c622336de0dcf3e0751ba01d65
|
Java
|
apache-2.0
|
opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open
|
java
|
## Code Before:
package net.onrc.onos.core.matchaction.match;
import net.onrc.onos.core.util.IPv4Net;
public class Ipv4Match implements Match {
IPv4Net dstIp;
public Ipv4Match(String ipAddressSlash) {
this.dstIp = new IPv4Net(ipAddressSlash);
}
public IPv4Net getDestination() {
return dstIp;
}
}
## Instruction:
Change IP address mask according to the prefix when setting the IP forwarding rule.
Change-Id: Ic0f8fb43913473c622336de0dcf3e0751ba01d65
## Code After:
package net.onrc.onos.core.matchaction.match;
import net.onrc.onos.core.util.IPv4;
import net.onrc.onos.core.util.IPv4Net;
public class Ipv4Match implements Match {
IPv4Net dstIp;
public Ipv4Match(String ipAddressSlash) {
this.dstIp = new IPv4Net(ipAddressSlash);
IPv4 ip = dstIp.address();
short prefLen = dstIp.prefixLen();
int mask = ~((1 << (32 - prefLen)) - 1);
int newIpInt = ip.value() & mask;
IPv4 newIp = new IPv4(newIpInt);
this.dstIp = new IPv4Net(newIp, prefLen);
}
public IPv4Net getDestination() {
return dstIp;
}
}
|
...
package net.onrc.onos.core.matchaction.match;
import net.onrc.onos.core.util.IPv4;
import net.onrc.onos.core.util.IPv4Net;
public class Ipv4Match implements Match {
...
public Ipv4Match(String ipAddressSlash) {
this.dstIp = new IPv4Net(ipAddressSlash);
IPv4 ip = dstIp.address();
short prefLen = dstIp.prefixLen();
int mask = ~((1 << (32 - prefLen)) - 1);
int newIpInt = ip.value() & mask;
IPv4 newIp = new IPv4(newIpInt);
this.dstIp = new IPv4Net(newIp, prefLen);
}
public IPv4Net getDestination() {
...
|
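A worked sketch of the prefix-to-mask arithmetic used in the Ipv4Match constructor above, redone in Python so the bit manipulation can be checked by hand (illustrative only, not part of the ONOS codebase; the helper names here are made up for the example):
def prefix_to_mask(prefix_len):
    """Return the 32-bit network mask for the given prefix length."""
    # ~((1 << (32 - prefix_len)) - 1) sets the top prefix_len bits to 1.
    # Python ints are unbounded, so mask back down to 32 bits explicitly;
    # in the Java code this truncation happens automatically on int.
    return ~((1 << (32 - prefix_len)) - 1) & 0xFFFFFFFF
def network_address(ip, prefix_len):
    """Clear the host bits so only the network part of the address remains."""
    return ip & prefix_to_mask(prefix_len)
# Example: 10.0.5.7/24 -> mask 0xFFFFFF00 -> network 10.0.5.0
ip = (10 << 24) | (0 << 16) | (5 << 8) | 7
assert prefix_to_mask(24) == 0xFFFFFF00
assert network_address(ip, 24) == (10 << 24) | (5 << 8)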
1fc1e160143b5a35741cf3fce9ced827a433d640
|
tests/test__pycompat.py
|
tests/test__pycompat.py
|
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
|
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
def test_izip():
r = dask_distance._pycompat.izip([1, 2, 3], ["a", "b", "c"])
assert not isinstance(r, list)
assert list(r) == [(1, 'a'), (2, 'b'), (3, 'c')]
|
Add a test for izip
|
Add a test for izip
Make sure that it generates an iterator on both Python 2 and Python 3.
Also check that it can be converted to a `list`.
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
python
|
## Code Before:
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
## Instruction:
Add a test for izip
Make sure that it generates an iterator on both Python 2 and Python 3.
Also check that it can be converted to a `list`.
## Code After:
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
def test_izip():
r = dask_distance._pycompat.izip([1, 2, 3], ["a", "b", "c"])
assert not isinstance(r, list)
assert list(r) == [(1, 'a'), (2, 'b'), (3, 'c')]
|
...
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
def test_izip():
r = dask_distance._pycompat.izip([1, 2, 3], ["a", "b", "c"])
assert not isinstance(r, list)
assert list(r) == [(1, 'a'), (2, 'b'), (3, 'c')]
...
|
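The izip test above exercises a Python 2/3 compatibility shim in dask_distance._pycompat; a minimal sketch of what such a shim typically looks like is below (an assumption for illustration; the real module may be written differently):
import sys
if sys.version_info[0] >= 3:
    izip = zip       # zip is already a lazy iterator on Python 3
    irange = range
else:
    from itertools import izip  # lazy pairwise iteration on Python 2
    irange = xrange
# Either spelling returns an iterator rather than a list, and it can be
# materialised with list(), which is exactly what the new test asserts:
pairs = izip([1, 2, 3], ["a", "b", "c"])
assert not isinstance(pairs, list)
assert list(pairs) == [(1, "a"), (2, "b"), (3, "c")]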
91a9da3bb1dda73add2a3040d35c9c58f7b5b4a5
|
alg_lonely_integer.py
|
alg_lonely_integer.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def lonely_integer():
pass
def main():
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def lonely_integer_naive(a_list):
"""Lonely integer by naive dictionary.
Time complexity: O(n).
Space complexity: O(n).
"""
integer_count_d = {}
for x in a_list:
if x in integer_count_d:
integer_count_d[x] += 1
else:
integer_count_d[x] = 1
for integer, count in integer_count_d.items():
if count == 1:
return integer
def lonely_integer(a_list):
"""Lonely integer by bit operation.
Time complexity: O(n).
Space complexity: O(1).
"""
integer = 0
for x in a_list:
integer ^= x
return integer
def main():
import time
a_list = [9, 1, 2, 3, 2, 9, 1, 7, 7]
start_time = time.time()
print('Find lonely integer by naive dictionary: {}'
.format(lonely_integer_naive(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('Find lonely integer by bit operation: {}'
.format(lonely_integer(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Complete lonely int by naive dict & bit op
|
Complete lonely int by naive dict & bit op
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
python
|
## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def lonely_integer():
pass
def main():
pass
if __name__ == '__main__':
main()
## Instruction:
Complete lonely int by naive dict & bit op
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def lonely_integer_naive(a_list):
"""Lonely integer by naive dictionary.
Time complexity: O(n).
Space complexity: O(n).
"""
integer_count_d = {}
for x in a_list:
if x in integer_count_d:
integer_count_d[x] += 1
else:
integer_count_d[x] = 1
for integer, count in integer_count_d.items():
if count == 1:
return integer
def lonely_integer(a_list):
"""Lonely integer by bit operation.
Time complexity: O(n).
Space complexity: O(1).
"""
integer = 0
for x in a_list:
integer ^= x
return integer
def main():
import time
a_list = [9, 1, 2, 3, 2, 9, 1, 7, 7]
start_time = time.time()
print('Find lonely integer by naive dictionary: {}'
.format(lonely_integer_naive(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('Find lonely integer by bit operation: {}'
.format(lonely_integer(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
// ... existing code ...
from __future__ import division
from __future__ import print_function
def lonely_integer_naive(a_list):
"""Lonely integer by naive dictionary.
Time complexity: O(n).
Space complexity: O(n).
"""
integer_count_d = {}
for x in a_list:
if x in integer_count_d:
integer_count_d[x] += 1
else:
integer_count_d[x] = 1
for integer, count in integer_count_d.items():
if count == 1:
return integer
def lonely_integer(a_list):
"""Lonely integer by bit operation.
Time complexity: O(n).
Space complexity: O(1).
"""
integer = 0
for x in a_list:
integer ^= x
return integer
def main():
import time
a_list = [9, 1, 2, 3, 2, 9, 1, 7, 7]
start_time = time.time()
print('Find lonely integer by naive dictionary: {}'
.format(lonely_integer_naive(a_list)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('Find lonely integer by bit operation: {}'
.format(lonely_integer(a_list)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
// ... rest of the code ...
|
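The bit-operation version of lonely_integer above works because XOR is commutative and associative, x ^ x == 0, and x ^ 0 == x, so every value that appears twice cancels itself and only the unpaired value survives. A quick standalone check of that property (illustration only):
from functools import reduce
from operator import xor
a_list = [9, 1, 2, 3, 2, 9, 1, 7, 7]
# Folding XOR over the list cancels the pairs and leaves the lonely 3.
assert reduce(xor, a_list) == 3
assert all(x ^ x == 0 and x ^ 0 == x for x in a_list)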
114f06fb7e332246b2ce455c96f4da32d10ffa53
|
app/controllers/HomeController.java
|
app/controllers/HomeController.java
|
package controllers;
import play.mvc.Controller;
import play.mvc.Result;
import views.html.confighelper;
import views.html.index;
/**
* This controller contains an action to handle HTTP requests
* to the application's home page.
*/
public class HomeController extends Controller {
/**
* Renders the main home page and provides the Gnag client ID used to create the GitHub authentication
* link.
* @return
*/
public Result index() {
return ok(index.render());
}
/**
* Will show the page for generating a Gradle config for a specific project. If the user has not authorized GitHub
* this will redirect to start the authentication flow.
* @return
*/
public Result configHelper() {
if (session(GitHubAuthController.TOKEN_KEY) == null) {
return redirect("/startAuth");
} else {
return ok(confighelper.render(session(GitHubAuthController.TOKEN_KEY)));
}
}
}
|
package controllers;
import play.mvc.Controller;
import play.mvc.Result;
import views.html.confighelper;
import views.html.index;
/**
* This controller contains an action to handle HTTP requests
* to the application's home page.
*/
public class HomeController extends Controller {
/**
* Renders the main home page and provides the Gnag client ID used to create the GitHub authentication
* link.
* @return
*/
public Result index() {
session().remove(GitHubAuthController.TOKEN_KEY);
return ok(index.render());
}
/**
* Will show the page for generating a Gradle config for a specific project. If the user has not authorized GitHub
* this will redirect to start the authentication flow.
* @return
*/
public Result configHelper() {
if (session(GitHubAuthController.TOKEN_KEY) == null) {
return redirect("/startAuth");
} else {
return ok(confighelper.render(session(GitHubAuthController.TOKEN_KEY)));
}
}
}
|
Remove token from session once back on the index page
|
Remove token from session once back on the index page
|
Java
|
apache-2.0
|
btkelly/gnag-website,btkelly/gnag-website,btkelly/gnag-website
|
java
|
## Code Before:
package controllers;
import play.mvc.Controller;
import play.mvc.Result;
import views.html.confighelper;
import views.html.index;
/**
* This controller contains an action to handle HTTP requests
* to the application's home page.
*/
public class HomeController extends Controller {
/**
* Renders the main home page and provides the Gnag client ID used to create the GitHub authentication
* link.
* @return
*/
public Result index() {
return ok(index.render());
}
/**
* Will show the page for generating a Gradle config for a specific project. If the user has not authorized GitHub
* this will redirect to start the authentication flow.
* @return
*/
public Result configHelper() {
if (session(GitHubAuthController.TOKEN_KEY) == null) {
return redirect("/startAuth");
} else {
return ok(confighelper.render(session(GitHubAuthController.TOKEN_KEY)));
}
}
}
## Instruction:
Remove token from session once back on the index page
## Code After:
package controllers;
import play.mvc.Controller;
import play.mvc.Result;
import views.html.confighelper;
import views.html.index;
/**
* This controller contains an action to handle HTTP requests
* to the application's home page.
*/
public class HomeController extends Controller {
/**
* Renders the main home page and provides the Gnag client ID used to create the GitHub authentication
* link.
* @return
*/
public Result index() {
session().remove(GitHubAuthController.TOKEN_KEY);
return ok(index.render());
}
/**
* Will show the page for generating a Gradle config for a specific project. If the user has not authorized GitHub
* this will redirect to start the authentication flow.
* @return
*/
public Result configHelper() {
if (session(GitHubAuthController.TOKEN_KEY) == null) {
return redirect("/startAuth");
} else {
return ok(confighelper.render(session(GitHubAuthController.TOKEN_KEY)));
}
}
}
|
// ... existing code ...
* @return
*/
public Result index() {
session().remove(GitHubAuthController.TOKEN_KEY);
return ok(index.render());
}
// ... rest of the code ...
|
c486d44cbb6007d5a89f36746822e68ea8cb0afa
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="cumulus",
version="0.1.0",
description="Girder API endpoints for interacting with cloud providers.",
author="Chris Haris",
author_email="[email protected]",
url="https://github.com/Kitware/cumulus",
packages=find_packages(),
package_data={
"": ["*.json"],
"cumulus": ["conf/*.json"],
})
|
from setuptools import setup, find_packages
setup(
name="cumulus",
version="0.1.0",
description="Girder API endpoints for interacting with cloud providers.",
author="Chris Haris",
author_email="[email protected]",
url="https://github.com/Kitware/cumulus",
packages=find_packages(exclude=["*.tests", "*.tests.*",
"tests.*", "tests"]),
package_data={
"": ["*.json", "*.sh"],
"cumulus": ["conf/*.json"],
})
|
Exclude unit tests from install
|
Exclude unit tests from install
|
Python
|
apache-2.0
|
Kitware/cumulus,Kitware/cumulus,cjh1/cumulus,cjh1/cumulus
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name="cumulus",
version="0.1.0",
description="Girder API endpoints for interacting with cloud providers.",
author="Chris Haris",
author_email="[email protected]",
url="https://github.com/Kitware/cumulus",
packages=find_packages(),
package_data={
"": ["*.json"],
"cumulus": ["conf/*.json"],
})
## Instruction:
Exclude unit tests from install
## Code After:
from setuptools import setup, find_packages
setup(
name="cumulus",
version="0.1.0",
description="Girder API endpoints for interacting with cloud providers.",
author="Chris Haris",
author_email="[email protected]",
url="https://github.com/Kitware/cumulus",
packages=find_packages(exclude=["*.tests", "*.tests.*",
"tests.*", "tests"]),
package_data={
"": ["*.json", "*.sh"],
"cumulus": ["conf/*.json"],
})
|
...
author="Chris Haris",
author_email="[email protected]",
url="https://github.com/Kitware/cumulus",
packages=find_packages(exclude=["*.tests", "*.tests.*",
"tests.*", "tests"]),
package_data={
"": ["*.json", "*.sh"],
"cumulus": ["conf/*.json"],
})
...
|
8d33b30950169e79d58e79b81781576bbcf1eb50
|
src/main/java/jp/ac/nii/prl/mape/autoscaling/analysis/model/dto/DeploymentFactory.java
|
src/main/java/jp/ac/nii/prl/mape/autoscaling/analysis/model/dto/DeploymentFactory.java
|
package jp.ac.nii.prl.mape.autoscaling.analysis.model.dto;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Adaptation;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Deployment;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Instance;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.InstanceType;
public class DeploymentFactory {
public static Deployment createDeployment(DeploymentDTO dto) {
return null;
}
public static DeploymentDTO createDeploymentDTO(Deployment deployment) {
return null;
}
public static Adaptation createAdaptation(AdaptationDTO dto) {
return null;
}
public static AdaptationDTO createAdaptationDTO(Adaptation adaptation) {
return null;
}
public static Instance createInstance(InstanceDTO dto) {
return null;
}
public static InstanceDTO createInstanceDTO(Instance instance) {
return null;
}
public static InstanceType createInstanceType(InstanceTypeDTO dto) {
return null;
}
public static InstanceTypeDTO createInstanceTypeDTO(InstanceType instanceType) {
return null;
}
}
|
package jp.ac.nii.prl.mape.autoscaling.analysis.model.dto;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Adaptation;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Deployment;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Instance;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.InstanceType;
public class DeploymentFactory {
public static Deployment createDeployment(DeploymentDTO dto) {
return null;
}
public static DeploymentDTO createDeploymentDTO(Deployment deployment) {
return null;
}
public static Adaptation createAdaptation(AdaptationDTO dto, Deployment deployment) {
Adaptation adaptation = new Adaptation();
adaptation.setAdapt(dto.isAdapt());
adaptation.setCpuCount(dto.getCpuCount());
adaptation.setScaleUp(dto.isScaleUp());
adaptation.setDeployment(deployment);
return adaptation;
}
public static AdaptationDTO createAdaptationDTO(Adaptation adaptation) {
AdaptationDTO dto = new AdaptationDTO();
dto.setAdapt(adaptation.isAdapt());
dto.setCpuCount(adaptation.getCpuCount());
dto.setScaleUp(adaptation.isScaleUp());
return dto;
}
public static Instance createInstance(InstanceDTO dto) {
return null;
}
public static InstanceDTO createInstanceDTO(Instance instance) {
return null;
}
public static InstanceType createInstanceType(InstanceTypeDTO dto) {
return null;
}
public static InstanceTypeDTO createInstanceTypeDTO(InstanceType instanceType) {
return null;
}
}
|
Implement Adaptation to AdaptationDTO and back
|
Implement Adaptation to AdaptationDTO and back
|
Java
|
mit
|
prl-tokyo/MAPE-autoscaling-analysis-service,prl-tokyo/MAPE-autoscaling-analysis-service
|
java
|
## Code Before:
package jp.ac.nii.prl.mape.autoscaling.analysis.model.dto;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Adaptation;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Deployment;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Instance;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.InstanceType;
public class DeploymentFactory {
public static Deployment createDeployment(DeploymentDTO dto) {
return null;
}
public static DeploymentDTO createDeploymentDTO(Deployment deployment) {
return null;
}
public static Adaptation createAdaptation(AdaptationDTO dto) {
return null;
}
public static AdaptationDTO createAdaptationDTO(Adaptation adaptation) {
return null;
}
public static Instance createInstance(InstanceDTO dto) {
return null;
}
public static InstanceDTO createInstanceDTO(Instance instance) {
return null;
}
public static InstanceType createInstanceType(InstanceTypeDTO dto) {
return null;
}
public static InstanceTypeDTO createInstanceTypeDTO(InstanceType instanceType) {
return null;
}
}
## Instruction:
Implement Adaptation to AdaptationDTO and back
## Code After:
package jp.ac.nii.prl.mape.autoscaling.analysis.model.dto;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Adaptation;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Deployment;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.Instance;
import jp.ac.nii.prl.mape.autoscaling.analysis.model.InstanceType;
public class DeploymentFactory {
public static Deployment createDeployment(DeploymentDTO dto) {
return null;
}
public static DeploymentDTO createDeploymentDTO(Deployment deployment) {
return null;
}
public static Adaptation createAdaptation(AdaptationDTO dto, Deployment deployment) {
Adaptation adaptation = new Adaptation();
adaptation.setAdapt(dto.isAdapt());
adaptation.setCpuCount(dto.getCpuCount());
adaptation.setScaleUp(dto.isScaleUp());
adaptation.setDeployment(deployment);
return adaptation;
}
public static AdaptationDTO createAdaptationDTO(Adaptation adaptation) {
AdaptationDTO dto = new AdaptationDTO();
dto.setAdapt(adaptation.isAdapt());
dto.setCpuCount(adaptation.getCpuCount());
dto.setScaleUp(adaptation.isScaleUp());
return dto;
}
public static Instance createInstance(InstanceDTO dto) {
return null;
}
public static InstanceDTO createInstanceDTO(Instance instance) {
return null;
}
public static InstanceType createInstanceType(InstanceTypeDTO dto) {
return null;
}
public static InstanceTypeDTO createInstanceTypeDTO(InstanceType instanceType) {
return null;
}
}
|
...
return null;
}
public static Adaptation createAdaptation(AdaptationDTO dto, Deployment deployment) {
Adaptation adaptation = new Adaptation();
adaptation.setAdapt(dto.isAdapt());
adaptation.setCpuCount(dto.getCpuCount());
adaptation.setScaleUp(dto.isScaleUp());
adaptation.setDeployment(deployment);
return adaptation;
}
public static AdaptationDTO createAdaptationDTO(Adaptation adaptation) {
AdaptationDTO dto = new AdaptationDTO();
dto.setAdapt(adaptation.isAdapt());
dto.setCpuCount(adaptation.getCpuCount());
dto.setScaleUp(adaptation.isScaleUp());
return dto;
}
public static Instance createInstance(InstanceDTO dto) {
...
|
4aaa5c8f5777910ace7d4c65908d42f16f446c30
|
ProvisionQL/Shared.h
|
ProvisionQL/Shared.h
|
static NSString * const kPluginBundleId = @"com.FerretSyndicate.ProvisionQL";
static NSString * const kDataType_ipa = @"com.apple.itunes.ipa";
static NSString * const kDataType_app = @"com.apple.application-bundle";
static NSString * const kDataType_ios_provision = @"com.apple.mobileprovision";
static NSString * const kDataType_ios_provision_old = @"com.apple.iphone.mobileprovision";
static NSString * const kDataType_osx_provision = @"com.apple.provisionprofile";
#define SIGNED_CODE 0
NSImage *roundCorners(NSImage *image);
NSImage *imageFromApp(NSURL *URL, NSString *dataType, NSString *fileName);
NSString *mainIconNameForApp(NSDictionary *appPropertyList);
int expirationStatus(NSDate *date, NSCalendar *calendar);
|
static NSString * const kPluginBundleId = @"com.ealeksandrov.ProvisionQL";
static NSString * const kDataType_ipa = @"com.apple.itunes.ipa";
static NSString * const kDataType_app = @"com.apple.application-bundle";
static NSString * const kDataType_ios_provision = @"com.apple.mobileprovision";
static NSString * const kDataType_ios_provision_old = @"com.apple.iphone.mobileprovision";
static NSString * const kDataType_osx_provision = @"com.apple.provisionprofile";
#define SIGNED_CODE 0
NSImage *roundCorners(NSImage *image);
NSImage *imageFromApp(NSURL *URL, NSString *dataType, NSString *fileName);
NSString *mainIconNameForApp(NSDictionary *appPropertyList);
int expirationStatus(NSDate *date, NSCalendar *calendar);
|
Set bundle identifier constant to be identical to Info.plist
|
Set bundle identifier constant to be identical to Info.plist
|
C
|
mit
|
ealeksandrov/ProvisionQL,ealeksandrov/ProvisionQL,ealeksandrov/ProvisionQL
|
c
|
## Code Before:
static NSString * const kPluginBundleId = @"com.FerretSyndicate.ProvisionQL";
static NSString * const kDataType_ipa = @"com.apple.itunes.ipa";
static NSString * const kDataType_app = @"com.apple.application-bundle";
static NSString * const kDataType_ios_provision = @"com.apple.mobileprovision";
static NSString * const kDataType_ios_provision_old = @"com.apple.iphone.mobileprovision";
static NSString * const kDataType_osx_provision = @"com.apple.provisionprofile";
#define SIGNED_CODE 0
NSImage *roundCorners(NSImage *image);
NSImage *imageFromApp(NSURL *URL, NSString *dataType, NSString *fileName);
NSString *mainIconNameForApp(NSDictionary *appPropertyList);
int expirationStatus(NSDate *date, NSCalendar *calendar);
## Instruction:
Set bundle identifier constant to be identical to Info.plist
## Code After:
static NSString * const kPluginBundleId = @"com.ealeksandrov.ProvisionQL";
static NSString * const kDataType_ipa = @"com.apple.itunes.ipa";
static NSString * const kDataType_app = @"com.apple.application-bundle";
static NSString * const kDataType_ios_provision = @"com.apple.mobileprovision";
static NSString * const kDataType_ios_provision_old = @"com.apple.iphone.mobileprovision";
static NSString * const kDataType_osx_provision = @"com.apple.provisionprofile";
#define SIGNED_CODE 0
NSImage *roundCorners(NSImage *image);
NSImage *imageFromApp(NSURL *URL, NSString *dataType, NSString *fileName);
NSString *mainIconNameForApp(NSDictionary *appPropertyList);
int expirationStatus(NSDate *date, NSCalendar *calendar);
|
...
static NSString * const kPluginBundleId = @"com.ealeksandrov.ProvisionQL";
static NSString * const kDataType_ipa = @"com.apple.itunes.ipa";
static NSString * const kDataType_app = @"com.apple.application-bundle";
static NSString * const kDataType_ios_provision = @"com.apple.mobileprovision";
...
|
a9844bad75c66e10f85be4555c9ad7aa2df15585
|
src/trajectory_server.py
|
src/trajectory_server.py
|
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
Remove import that was not used
|
Remove import that was not used
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
python
|
## Code Before:
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
## Instruction:
Remove import that was not used
## Code After:
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
// ... existing code ...
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
// ... rest of the code ...
|
57f8811ed729ecbc2c79429e8e18d211d888b82c
|
app/src/main/java/com/i906/mpt/date/DateProvider.java
|
app/src/main/java/com/i906/mpt/date/DateProvider.java
|
package com.i906.mpt.date;
import java.util.Calendar;
import java.util.TimeZone;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* @author Noorzaini Ilhami
*/
@Singleton
class DateProvider {
private final Calendar mCalendar;
@Inject
DateProvider() {
mCalendar = getCalendarInstance();
}
private void refresh() {
mCalendar.setTimeInMillis(getCurrentTime());
}
public long getCurrentTime() {
return System.currentTimeMillis();
}
public Calendar getNow() {
refresh();
return mCalendar;
}
public Calendar getCalendarInstance() {
return Calendar.getInstance(TimeZone.getTimeZone("GMT+8"));
}
}
|
package com.i906.mpt.date;
import java.util.Calendar;
import java.util.TimeZone;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* @author Noorzaini Ilhami
*/
@Singleton
class DateProvider {
private Calendar mCalendar;
@Inject
DateProvider() {
}
private void refresh() {
mCalendar = getCalendarInstance();
}
public long getCurrentTime() {
return System.currentTimeMillis();
}
public Calendar getNow() {
refresh();
return mCalendar;
}
public Calendar getCalendarInstance() {
return Calendar.getInstance(TimeZone.getTimeZone("GMT+8"));
}
}
|
Fix random wrong month or year in prayer times
|
Fix random wrong month or year in prayer times
|
Java
|
apache-2.0
|
AhmadMuzakkir/MalaysiaPrayerTimesAndroid,MalaysiaPrayerTimes/android,MalaysiaPrayerTimes/android
|
java
|
## Code Before:
package com.i906.mpt.date;
import java.util.Calendar;
import java.util.TimeZone;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* @author Noorzaini Ilhami
*/
@Singleton
class DateProvider {
private final Calendar mCalendar;
@Inject
DateProvider() {
mCalendar = getCalendarInstance();
}
private void refresh() {
mCalendar.setTimeInMillis(getCurrentTime());
}
public long getCurrentTime() {
return System.currentTimeMillis();
}
public Calendar getNow() {
refresh();
return mCalendar;
}
public Calendar getCalendarInstance() {
return Calendar.getInstance(TimeZone.getTimeZone("GMT+8"));
}
}
## Instruction:
Fix random wrong month or year in prayer times
## Code After:
package com.i906.mpt.date;
import java.util.Calendar;
import java.util.TimeZone;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* @author Noorzaini Ilhami
*/
@Singleton
class DateProvider {
private Calendar mCalendar;
@Inject
DateProvider() {
}
private void refresh() {
mCalendar = getCalendarInstance();
}
public long getCurrentTime() {
return System.currentTimeMillis();
}
public Calendar getNow() {
refresh();
return mCalendar;
}
public Calendar getCalendarInstance() {
return Calendar.getInstance(TimeZone.getTimeZone("GMT+8"));
}
}
|
# ... existing code ...
@Singleton
class DateProvider {
private Calendar mCalendar;
@Inject
DateProvider() {
}
private void refresh() {
mCalendar = getCalendarInstance();
}
public long getCurrentTime() {
# ... rest of the code ...
|
6eeecb5e36e5551ba3a3c35a9c7f52393d2f9d14
|
src/puzzle/problems/problem.py
|
src/puzzle/problems/problem.py
|
from src.data import meta
class Problem(object):
def __init__(self, name, lines):
self.name = name
self.lines = lines
self._solutions = None
self._constraints = []
def constrain(self, fn):
self._constraints.append(fn)
# Invalidate solutions.
self._solutions = None
def solutions(self):
if self._solutions is None:
self._solutions = meta.Meta(
(k, v) for k, v in self._solve().items() if all(
[fn(k, v) for fn in self._constraints]
)
)
return self._solutions
def _solve(self):
"""Solves Problem.
Returns:
dict Dict mapping solution to score.
"""
raise NotImplementedError()
|
from src.data import meta
class Problem(object):
def __init__(self, name, lines):
self.name = name
self.lines = lines
self._solutions = None
self._constraints = []
@property
def kind(self):
return str(type(self)).strip("'<>").split('.').pop()
@property
def solution(self):
return self.solutions().peek()
def constrain(self, fn):
self._constraints.append(fn)
# Invalidate solutions.
self._solutions = None
def solutions(self):
if self._solutions is None:
self._solutions = meta.Meta(
(k, v) for k, v in self._solve().items() if all(
[fn(k, v) for fn in self._constraints]
)
)
return self._solutions
def _solve(self):
"""Solves Problem.
Returns:
dict Dict mapping solution to score.
"""
raise NotImplementedError()
|
Add simple helper properties to Problem.
|
Add simple helper properties to Problem.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
python
|
## Code Before:
from src.data import meta
class Problem(object):
def __init__(self, name, lines):
self.name = name
self.lines = lines
self._solutions = None
self._constraints = []
def constrain(self, fn):
self._constraints.append(fn)
# Invalidate solutions.
self._solutions = None
def solutions(self):
if self._solutions is None:
self._solutions = meta.Meta(
(k, v) for k, v in self._solve().items() if all(
[fn(k, v) for fn in self._constraints]
)
)
return self._solutions
def _solve(self):
"""Solves Problem.
Returns:
dict Dict mapping solution to score.
"""
raise NotImplementedError()
## Instruction:
Add simple helper properties to Problem.
## Code After:
from src.data import meta
class Problem(object):
def __init__(self, name, lines):
self.name = name
self.lines = lines
self._solutions = None
self._constraints = []
@property
def kind(self):
return str(type(self)).strip("'<>").split('.').pop()
@property
def solution(self):
return self.solutions().peek()
def constrain(self, fn):
self._constraints.append(fn)
# Invalidate solutions.
self._solutions = None
def solutions(self):
if self._solutions is None:
self._solutions = meta.Meta(
(k, v) for k, v in self._solve().items() if all(
[fn(k, v) for fn in self._constraints]
)
)
return self._solutions
def _solve(self):
"""Solves Problem.
Returns:
dict Dict mapping solution to score.
"""
raise NotImplementedError()
|
// ... existing code ...
self.lines = lines
self._solutions = None
self._constraints = []
@property
def kind(self):
return str(type(self)).strip("'<>").split('.').pop()
@property
def solution(self):
return self.solutions().peek()
def constrain(self, fn):
self._constraints.append(fn)
// ... rest of the code ...
|
382d304a3f8a4a1d2a396074836ed6e951245800
|
cropList.py
|
cropList.py
|
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
|
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket
|
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket
|
Python
|
mit
|
nightjuggler/pig,nightjuggler/pig,nightjuggler/pig
|
python
|
## Code Before:
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
## Instruction:
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket
## Code After:
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
# ... existing code ...
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
# ... rest of the code ...
|
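For reference, the rewritten formatting produces output like the following; the two filenames are made up, since the real list comes from the originals/ directory:

```python
# Standalone rendition of the write logic, with a fixed file list.
names = ['IMG_0001.JPG', 'beach.png']
body = ',\n'.join(['\t"{}"'.format(name) for name in names])
print('var imageNames = [\n' + body + '\n];')
# var imageNames = [
#       "IMG_0001.JPG",
#       "beach.png"
# ];
```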
81321c880914cd7de46c1234d3cc73eaedd1350e
|
src/libsodium/include/sodium/utils.h
|
src/libsodium/include/sodium/utils.h
|
extern "C" {
#endif
#ifndef __cplusplus
# define _SODIUM_C99(X) X
#else
# define _SODIUM_C99(X)
#endif
unsigned char *_sodium_alignedcalloc(unsigned char ** const unaligned_p,
const size_t len);
SODIUM_EXPORT
void sodium_memzero(void * const pnt, const size_t len);
SODIUM_EXPORT
int sodium_memcmp(const void * const b1_, const void * const b2_, size_t size);
SODIUM_EXPORT
char *sodium_bin2hex(char * const hex, const size_t hexlen,
const unsigned char *bin, const size_t binlen);
#ifdef __cplusplus
}
#endif
#endif
|
extern "C" {
#endif
#if defined(__cplusplus) || !defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L
# define _SODIUM_C99(X)
#else
# define _SODIUM_C99(X) X
#endif
unsigned char *_sodium_alignedcalloc(unsigned char ** const unaligned_p,
const size_t len);
SODIUM_EXPORT
void sodium_memzero(void * const pnt, const size_t len);
SODIUM_EXPORT
int sodium_memcmp(const void * const b1_, const void * const b2_, size_t size);
SODIUM_EXPORT
char *sodium_bin2hex(char * const hex, const size_t hexlen,
const unsigned char *bin, const size_t binlen);
#ifdef __cplusplus
}
#endif
#endif
|
Define _SODIUM_C99 as empty on retarded compilers, not only when using C++
|
Define _SODIUM_C99 as empty on retarded compilers, not only when using C++
|
C
|
isc
|
eburkitt/libsodium,CyanogenMod/android_external_dnscrypt_libsodium,tml/libsodium,soumith/libsodium,eburkitt/libsodium,mvduin/libsodium,CyanogenMod/android_external_dnscrypt_libsodium,zhuqling/libsodium,netroby/libsodium,pmienk/libsodium,eburkitt/libsodium,donpark/libsodium,GreatFruitOmsk/libsodium,optedoblivion/android_external_libsodium,mvduin/libsodium,SpiderOak/libsodium,rustyhorde/libsodium,kytvi2p/libsodium,HappyYang/libsodium,pmienk/libsodium,paragonie-scott/libsodium,Payshare/libsodium,akkakks/libsodium,SpiderOak/libsodium,paragonie-scott/libsodium,netroby/libsodium,JackWink/libsodium,donpark/libsodium,mvduin/libsodium,zhuqling/libsodium,akkakks/libsodium,paragonie-scott/libsodium,netroby/libsodium,pyparallel/libsodium,kytvi2p/libsodium,optedoblivion/android_external_libsodium,JackWink/libsodium,optedoblivion/android_external_libsodium,tml/libsodium,pyparallel/libsodium,Payshares/libsodium,JackWink/libsodium,Payshare/libsodium,soumith/libsodium,Payshare/libsodium,zhuqling/libsodium,donpark/libsodium,akkakks/libsodium,CyanogenMod/android_external_dnscrypt_libsodium,pmienk/libsodium,GreatFruitOmsk/libsodium,SpiderOak/libsodium,HappyYang/libsodium,pyparallel/libsodium,SpiderOak/libsodium,kytvi2p/libsodium,HappyYang/libsodium,Payshares/libsodium,soumith/libsodium,akkakks/libsodium,tml/libsodium,rustyhorde/libsodium,GreatFruitOmsk/libsodium,rustyhorde/libsodium,Payshares/libsodium,rustyhorde/libsodium
|
c
|
## Code Before:
extern "C" {
#endif
#ifndef __cplusplus
# define _SODIUM_C99(X) X
#else
# define _SODIUM_C99(X)
#endif
unsigned char *_sodium_alignedcalloc(unsigned char ** const unaligned_p,
const size_t len);
SODIUM_EXPORT
void sodium_memzero(void * const pnt, const size_t len);
SODIUM_EXPORT
int sodium_memcmp(const void * const b1_, const void * const b2_, size_t size);
SODIUM_EXPORT
char *sodium_bin2hex(char * const hex, const size_t hexlen,
const unsigned char *bin, const size_t binlen);
#ifdef __cplusplus
}
#endif
#endif
## Instruction:
Define _SODIUM_C99 as empty on retarded compilers, not only when using C++
## Code After:
extern "C" {
#endif
#if defined(__cplusplus) || !defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L
# define _SODIUM_C99(X)
#else
# define _SODIUM_C99(X) X
#endif
unsigned char *_sodium_alignedcalloc(unsigned char ** const unaligned_p,
const size_t len);
SODIUM_EXPORT
void sodium_memzero(void * const pnt, const size_t len);
SODIUM_EXPORT
int sodium_memcmp(const void * const b1_, const void * const b2_, size_t size);
SODIUM_EXPORT
char *sodium_bin2hex(char * const hex, const size_t hexlen,
const unsigned char *bin, const size_t binlen);
#ifdef __cplusplus
}
#endif
#endif
|
// ... existing code ...
extern "C" {
#endif
#if defined(__cplusplus) || !defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L
# define _SODIUM_C99(X)
#else
# define _SODIUM_C99(X) X
#endif
unsigned char *_sodium_alignedcalloc(unsigned char ** const unaligned_p,
// ... rest of the code ...
|
8a4aea90aafaca4d4f28873c915ad1f4043dc14b
|
src/main/java/techreborn/world/DungeonLoot.java
|
src/main/java/techreborn/world/DungeonLoot.java
|
package techreborn.world;
import net.minecraft.item.ItemStack;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraftforge.common.ChestGenHooks;
import techreborn.items.ItemIngots;
public class DungeonLoot {
public static void init()
{
generate(ItemIngots.getIngotByName("steel"), 5);
}
public static void generate(ItemStack itemStack, int rare)
{
ChestGenHooks.getInfo(ChestGenHooks.DUNGEON_CHEST).addItem(new WeightedRandomChestContent(itemStack, itemStack.getItemDamage(), itemStack.stackSize, rare));
ChestGenHooks.getInfo(ChestGenHooks.MINESHAFT_CORRIDOR).addItem(new WeightedRandomChestContent(itemStack, itemStack.getItemDamage(), itemStack.stackSize, rare));
ChestGenHooks.getInfo(ChestGenHooks.PYRAMID_DESERT_CHEST).addItem(new WeightedRandomChestContent(itemStack ,itemStack.getItemDamage(), itemStack.stackSize, rare));
ChestGenHooks.getInfo(ChestGenHooks.STRONGHOLD_CORRIDOR).addItem(new WeightedRandomChestContent(itemStack ,itemStack.getItemDamage(), itemStack.stackSize, rare));
}
}
|
package techreborn.world;
import java.util.Arrays;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraftforge.common.ChestGenHooks;
import techreborn.items.ItemIngots;
public class DungeonLoot {
public static void init()
{
generate(ItemIngots.getIngotByName("steel").getItem(), 5);
}
public static void generate(Item item, int rare)
{
for (String category : Arrays.asList(ChestGenHooks.VILLAGE_BLACKSMITH, ChestGenHooks.MINESHAFT_CORRIDOR, ChestGenHooks.PYRAMID_DESERT_CHEST, ChestGenHooks.PYRAMID_JUNGLE_CHEST, ChestGenHooks.PYRAMID_JUNGLE_DISPENSER, ChestGenHooks.STRONGHOLD_CORRIDOR, ChestGenHooks.STRONGHOLD_LIBRARY, ChestGenHooks.STRONGHOLD_CROSSING, ChestGenHooks.BONUS_CHEST, ChestGenHooks.DUNGEON_CHEST))
{
ChestGenHooks.addItem(category, new WeightedRandomChestContent(item, 0, 1, 3, rare));
}
}
}
|
Clean up for Dungeon loot code
|
Clean up for Dungeon loot code
|
Java
|
mit
|
TechReborn/TechReborn,drcrazy/TechReborn,Dimmerworld/TechReborn
|
java
|
## Code Before:
package techreborn.world;
import net.minecraft.item.ItemStack;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraftforge.common.ChestGenHooks;
import techreborn.items.ItemIngots;
public class DungeonLoot {
public static void init()
{
generate(ItemIngots.getIngotByName("steel"), 5);
}
public static void generate(ItemStack itemStack, int rare)
{
ChestGenHooks.getInfo(ChestGenHooks.DUNGEON_CHEST).addItem(new WeightedRandomChestContent(itemStack, itemStack.getItemDamage(), itemStack.stackSize, rare));
ChestGenHooks.getInfo(ChestGenHooks.MINESHAFT_CORRIDOR).addItem(new WeightedRandomChestContent(itemStack, itemStack.getItemDamage(), itemStack.stackSize, rare));
ChestGenHooks.getInfo(ChestGenHooks.PYRAMID_DESERT_CHEST).addItem(new WeightedRandomChestContent(itemStack ,itemStack.getItemDamage(), itemStack.stackSize, rare));
ChestGenHooks.getInfo(ChestGenHooks.STRONGHOLD_CORRIDOR).addItem(new WeightedRandomChestContent(itemStack ,itemStack.getItemDamage(), itemStack.stackSize, rare));
}
}
## Instruction:
Clean up for Dungeon loot code
## Code After:
package techreborn.world;
import java.util.Arrays;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraftforge.common.ChestGenHooks;
import techreborn.items.ItemIngots;
public class DungeonLoot {
public static void init()
{
generate(ItemIngots.getIngotByName("steel").getItem(), 5);
}
public static void generate(Item item, int rare)
{
for (String category : Arrays.asList(ChestGenHooks.VILLAGE_BLACKSMITH, ChestGenHooks.MINESHAFT_CORRIDOR, ChestGenHooks.PYRAMID_DESERT_CHEST, ChestGenHooks.PYRAMID_JUNGLE_CHEST, ChestGenHooks.PYRAMID_JUNGLE_DISPENSER, ChestGenHooks.STRONGHOLD_CORRIDOR, ChestGenHooks.STRONGHOLD_LIBRARY, ChestGenHooks.STRONGHOLD_CROSSING, ChestGenHooks.BONUS_CHEST, ChestGenHooks.DUNGEON_CHEST))
{
ChestGenHooks.addItem(category, new WeightedRandomChestContent(item, 0, 1, 3, rare));
}
}
}
|
...
package techreborn.world;
import java.util.Arrays;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraftforge.common.ChestGenHooks;
...
public static void init()
{
generate(ItemIngots.getIngotByName("steel").getItem(), 5);
}
public static void generate(Item item, int rare)
{
for (String category : Arrays.asList(ChestGenHooks.VILLAGE_BLACKSMITH, ChestGenHooks.MINESHAFT_CORRIDOR, ChestGenHooks.PYRAMID_DESERT_CHEST, ChestGenHooks.PYRAMID_JUNGLE_CHEST, ChestGenHooks.PYRAMID_JUNGLE_DISPENSER, ChestGenHooks.STRONGHOLD_CORRIDOR, ChestGenHooks.STRONGHOLD_LIBRARY, ChestGenHooks.STRONGHOLD_CROSSING, ChestGenHooks.BONUS_CHEST, ChestGenHooks.DUNGEON_CHEST))
{
ChestGenHooks.addItem(category, new WeightedRandomChestContent(item, 0, 1, 3, rare));
}
}
}
...
|
16dda42316176f0ad9c747731764855792fe88d6
|
lymph/utils/observables.py
|
lymph/utils/observables.py
|
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
|
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
Allow observing more than one action at once
|
Allow observing more than one action at once
|
Python
|
apache-2.0
|
lyudmildrx/lymph,mouadino/lymph,Drahflow/lymph,itakouna/lymph,vpikulik/lymph,deliveryhero/lymph,kstrempel/lymph,alazaro/lymph,lyudmildrx/lymph,itakouna/lymph,mamachanko/lymph,torte/lymph,mamachanko/lymph,lyudmildrx/lymph,alazaro/lymph,mouadino/lymph,mamachanko/lymph,mouadino/lymph,alazaro/lymph,itakouna/lymph,dushyant88/lymph
|
python
|
## Code Before:
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
## Instruction:
Allow observing more than one action at once
## Code After:
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
// ... existing code ...
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
// ... rest of the code ...
|
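A self-contained sketch of the widened `observe()` signature; the `Registry` subclass and the 'created'/'deleted' action names are invented for illustration:

```python
class Registry(Observable):            # Observable as defined in this record
    def add(self, item):
        self.notify_observers('created', item=item)
    def remove(self, item):
        self.notify_observers('deleted', item=item)

def audit(action, item):
    print('audit:', action, item)

r = Registry()
r.observe(('created', 'deleted'), audit)   # one callback, several actions
r.add('user-42')                           # audit: created user-42
r.remove('user-42')                        # audit: deleted user-42
```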
a18ae589f8217bc26bf1d4a8841c637354aedbaa
|
ispmgr/wwwdomain.py
|
ispmgr/wwwdomain.py
|
import json
import api
class WWWDomain(api.API):
def __init__(self, auth_handler):
self.url = auth_handler.url
self.sessid = auth_handler.sessid
self.func = 'wwwdomain.edit'
self.out = 'json'
self.params = {
'auth' : self.sessid,
'out' : self.out,
'func' : self.func,
}
def list(self, domain=None):
"""List all www domains. If domains is used, list details about this one."""
if domain:
self.params['elid'] = domain
else:
self.params['func'] = 'wwwdomain'
data = self.process_api(self.url, self.params)
out = json.load(data)
try:
return out['elem']
except KeyError:
return out
def add(self, domain='', owner='', admin='', ip='', **kwargs):
"""Add a new wwwdomain to configuration. If a DNS server is configurated, API adds
domain there too."""
self.params['sok'] = 'yes'
self.params['domain'] = domain
self.params['owner'] = owner
self.params['admin'] = admin
self.params['ip'] = ip
for key in kwargs:
self.params[key] = kwargs[key]
data = self.process_api(self.url, self.params)
out = json.load(data)
return out
|
import json
import api
class WWWDomain(api.API):
def __init__(self, auth_handler):
self.url = auth_handler.url
self.sessid = auth_handler.sessid
self.func = 'wwwdomain.edit'
self.out = 'json'
self._clear_params()
def _clear_params(self):
try:
self.params.clear()
except NameError:
pass
self.params = {
'auth' : self.sessid,
'out' : 'json',
'func' : self.func,
}
def list(self, domain=None):
"""List all www domains. If domains is used, list details about this one."""
self._clear_params()
if domain:
self.params['elid'] = domain
else:
self.params['func'] = 'wwwdomain'
data = self.process_api(self.url, self.params)
out = json.load(data)
try:
return out['elem']
except KeyError:
return out
def add(self, domain='', owner='', admin='', ip='', **kwargs):
"""Add a new wwwdomain to configuration. If a DNS server is configurated, API adds
domain there too."""
self._clear_params()
self.params['sok'] = 'yes'
self.params['domain'] = domain
self.params['owner'] = owner
self.params['admin'] = admin
self.params['ip'] = ip
for key in kwargs:
self.params[key] = kwargs[key]
data = self.process_api(self.url, self.params)
out = json.load(data)
return out
|
Clear parameters before editing/adding. Before, it was impossible to call two functions in sequence.
|
Clear parameters before editing/adding. Before, it was impossible to call two functions in sequence.
|
Python
|
mit
|
jakubjedelsky/python-ispmgr
|
python
|
## Code Before:
import json
import api
class WWWDomain(api.API):
def __init__(self, auth_handler):
self.url = auth_handler.url
self.sessid = auth_handler.sessid
self.func = 'wwwdomain.edit'
self.out = 'json'
self.params = {
'auth' : self.sessid,
'out' : self.out,
'func' : self.func,
}
def list(self, domain=None):
"""List all www domains. If domains is used, list details about this one."""
if domain:
self.params['elid'] = domain
else:
self.params['func'] = 'wwwdomain'
data = self.process_api(self.url, self.params)
out = json.load(data)
try:
return out['elem']
except KeyError:
return out
def add(self, domain='', owner='', admin='', ip='', **kwargs):
"""Add a new wwwdomain to configuration. If a DNS server is configurated, API adds
domain there too."""
self.params['sok'] = 'yes'
self.params['domain'] = domain
self.params['owner'] = owner
self.params['admin'] = admin
self.params['ip'] = ip
for key in kwargs:
self.params[key] = kwargs[key]
data = self.process_api(self.url, self.params)
out = json.load(data)
return out
## Instruction:
Clear parameters before editing/adding. Before, it was impossible to call two functions in sequence.
## Code After:
import json
import api
class WWWDomain(api.API):
def __init__(self, auth_handler):
self.url = auth_handler.url
self.sessid = auth_handler.sessid
self.func = 'wwwdomain.edit'
self.out = 'json'
self._clear_params()
def _clear_params(self):
try:
self.params.clear()
except NameError:
pass
self.params = {
'auth' : self.sessid,
'out' : 'json',
'func' : self.func,
}
def list(self, domain=None):
"""List all www domains. If domains is used, list details about this one."""
self._clear_params()
if domain:
self.params['elid'] = domain
else:
self.params['func'] = 'wwwdomain'
data = self.process_api(self.url, self.params)
out = json.load(data)
try:
return out['elem']
except KeyError:
return out
def add(self, domain='', owner='', admin='', ip='', **kwargs):
"""Add a new wwwdomain to configuration. If a DNS server is configurated, API adds
domain there too."""
self._clear_params()
self.params['sok'] = 'yes'
self.params['domain'] = domain
self.params['owner'] = owner
self.params['admin'] = admin
self.params['ip'] = ip
for key in kwargs:
self.params[key] = kwargs[key]
data = self.process_api(self.url, self.params)
out = json.load(data)
return out
|
...
self.sessid = auth_handler.sessid
self.func = 'wwwdomain.edit'
self.out = 'json'
self._clear_params()
def _clear_params(self):
try:
self.params.clear()
except NameError:
pass
self.params = {
'auth' : self.sessid,
'out' : 'json',
'func' : self.func,
}
def list(self, domain=None):
"""List all www domains. If domains is used, list details about this one."""
self._clear_params()
if domain:
self.params['elid'] = domain
else:
...
def add(self, domain='', owner='', admin='', ip='', **kwargs):
"""Add a new wwwdomain to configuration. If a DNS server is configurated, API adds
domain there too."""
self._clear_params()
self.params['sok'] = 'yes'
self.params['domain'] = domain
self.params['owner'] = owner
...
|
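A hedged sketch of the calling pattern this fix enables -- listing, then adding, within one `WWWDomain` instance. The auth handler and hostnames are placeholders, and `process_api` is assumed to reach a live ISPmanager endpoint, so this is illustrative rather than runnable offline:

```python
class FakeAuth(object):                     # stand-in for the real auth handler
    url = 'https://panel.example.com/ispmgr'
    sessid = 'abc123'

www = WWWDomain(FakeAuth())

www.list()                                  # plain listing: func=wwwdomain
www.list('example.com')                     # detail view: elid=example.com

# Previously the elid/sok keys from earlier calls lingered in self.params;
# _clear_params() now resets them, so add() starts from a clean dict.
www.add(domain='new.example.com', owner='client1',
        admin='admin@example.com', ip='203.0.113.10')
```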
66e16d6e3d80ab81967232d5d154c64c8e277def
|
robotpy_ext/misc/periodic_filter.py
|
robotpy_ext/misc/periodic_filter.py
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
Allow user to select bypass level
|
Allow user to select bypass level
|
Python
|
bsd-3-clause
|
Twinters007/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,robotpy/robotpy-wpilib-utilities,Twinters007/robotpy-wpilib-utilities
|
python
|
## Code Before:
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno > logging.INFO
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
## Instruction:
Allow user to select bypass level
## Code After:
import logging
import wpilib
class PeriodicFilter:
"""
Periodic Filter to help keep down clutter in the console.
Simply add this filter to your logger and the logger will
only print periodically.
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
now = wpilib.Timer.getFPGATimestamp()
self.loggingLoop = False
if now - self.__last_log > self.logging_interval:
self.loggingLoop = True
self.__last_log = now
|
...
The logger will always print logging levels of WARNING or higher
"""
def __init__(self, period, bypassLevel=logging.WARN):
'''
:param period: Wait period (in seconds) between logs
'''
...
self.period = period
self.loggingLoop = True
self._last_log = -period
self.bypassLevel = bypassLevel
def filter(self, record):
"""Performs filtering action for logger"""
self._refresh_logger()
return self.parent.loggingLoop or record.levelno >= self.bypassLevel
def _refresh_logger(self):
"""Determine if the log wait period has passed"""
...
|
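The snippet above still leans on wpilib and references attributes that are not set in `__init__` as shown (`parent`, `__last_log`, `logging_interval`), so rather than call it directly, here is a self-contained adaptation of the same idea -- throttle records below the bypass level, always pass records at or above it:

```python
import logging
import time

class RateLimitFilter(logging.Filter):
    """Standalone adaptation of the PeriodicFilter idea (not the wpilib class)."""
    def __init__(self, period, bypass_level=logging.WARN):
        super(RateLimitFilter, self).__init__()
        self.period = period
        self.bypass_level = bypass_level
        self._last_log = -period

    def filter(self, record):
        if record.levelno >= self.bypass_level:
            return True                      # bypass: always log
        now = time.monotonic()
        if now - self._last_log > self.period:
            self._last_log = now
            return True                      # periodic slot: log once
        return False                         # otherwise stay quiet

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('drivetrain')
log.addFilter(RateLimitFilter(period=2.0, bypass_level=logging.ERROR))
log.error('errors always pass the filter')
log.warning('warnings are throttled like info once bypass is set to ERROR')
```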
b841954d3e11a0d626016ca709f4b4fd3ad75e8e
|
Classes/Categories/NSDate+GTTimeAdditions.h
|
Classes/Categories/NSDate+GTTimeAdditions.h
|
//
// NSDate+GTTimeAdditions.h
// ObjectiveGitFramework
//
// Created by Danny Greg on 27/03/2013.
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "git2.h"
@interface NSDate (GTTimeAdditions)
// Creates a new `NSDate` from the provided `git_time`.
//
// time - The `git_time` to base the returned date on.
// timeZone - The timezone used by the time passed in.
//
// Returns an `NSDate` object representing the passed in `time`.
+ (NSDate *)gt_dateFromGitTime:(git_time)time timeZone:(NSTimeZone **)timeZone;
// Converts the date to a `git_time`.
//
// timeZone - An `NSTimeZone` to describe the time offset. This is optional, if
// `nil` the default time zone will be used.
- (git_time)gt_gitTimeUsingTimeZone:(NSTimeZone *)timeZone;
@end
@interface NSTimeZone (GTTimeAdditions)
// The difference, in minutes, between the current default timezone and GMT.
@property (nonatomic, readonly) int gt_gitTimeOffset;
@end
|
//
// NSDate+GTTimeAdditions.h
// ObjectiveGitFramework
//
// Created by Danny Greg on 27/03/2013.
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "git2.h"
@interface NSDate (GTTimeAdditions)
// Creates a new `NSDate` from the provided `git_time`.
//
// time - The `git_time` to base the returned date on.
// timeZone - The timezone used by the time passed in. Optional.
//
// Returns an `NSDate` object representing the passed in `time`.
+ (NSDate *)gt_dateFromGitTime:(git_time)time timeZone:(NSTimeZone **)timeZone;
// Converts the date to a `git_time`.
//
// timeZone - An `NSTimeZone` to describe the time offset. This is optional, if
// `nil` the default time zone will be used.
- (git_time)gt_gitTimeUsingTimeZone:(NSTimeZone *)timeZone;
@end
@interface NSTimeZone (GTTimeAdditions)
// The difference, in minutes, between the current default timezone and GMT.
@property (nonatomic, readonly) int gt_gitTimeOffset;
@end
|
Document the timeZone as being optional.
|
Document the timeZone as being optional.
|
C
|
mit
|
tiennou/objective-git,blackpixel/objective-git,misterfifths/objective-git,javiertoledo/objective-git,Acidburn0zzz/objective-git,0x4a616e/objective-git,phatblat/objective-git,nerdishbynature/objective-git,TOMalley104/objective-git,c9s/objective-git,misterfifths/objective-git,javiertoledo/objective-git,c9s/objective-git,pietbrauer/objective-git,Acidburn0zzz/objective-git,0x4a616e/objective-git,misterfifths/objective-git,tiennou/objective-git,blackpixel/objective-git,dleehr/objective-git,blackpixel/objective-git,alehed/objective-git,nerdishbynature/objective-git,libgit2/objective-git,libgit2/objective-git,TOMalley104/objective-git,phatblat/objective-git,slavikus/objective-git,c9s/objective-git,javiertoledo/objective-git,libgit2/objective-git,Acidburn0zzz/objective-git,slavikus/objective-git,alehed/objective-git,0x4a616e/objective-git,dleehr/objective-git,c9s/objective-git,misterfifths/objective-git,dleehr/objective-git,alehed/objective-git,pietbrauer/objective-git,TOMalley104/objective-git,tiennou/objective-git,slavikus/objective-git,phatblat/objective-git,javiertoledo/objective-git,libgit2/objective-git,TOMalley104/objective-git,nerdishbynature/objective-git,dleehr/objective-git,pietbrauer/objective-git,blackpixel/objective-git,pietbrauer/objective-git,Acidburn0zzz/objective-git
|
c
|
## Code Before:
//
// NSDate+GTTimeAdditions.h
// ObjectiveGitFramework
//
// Created by Danny Greg on 27/03/2013.
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "git2.h"
@interface NSDate (GTTimeAdditions)
// Creates a new `NSDate` from the provided `git_time`.
//
// time - The `git_time` to base the returned date on.
// timeZone - The timezone used by the time passed in.
//
// Returns an `NSDate` object representing the passed in `time`.
+ (NSDate *)gt_dateFromGitTime:(git_time)time timeZone:(NSTimeZone **)timeZone;
// Converts the date to a `git_time`.
//
// timeZone - An `NSTimeZone` to describe the time offset. This is optional, if
// `nil` the default time zone will be used.
- (git_time)gt_gitTimeUsingTimeZone:(NSTimeZone *)timeZone;
@end
@interface NSTimeZone (GTTimeAdditions)
// The difference, in minutes, between the current default timezone and GMT.
@property (nonatomic, readonly) int gt_gitTimeOffset;
@end
## Instruction:
Document the timeZone as being optional.
## Code After:
//
// NSDate+GTTimeAdditions.h
// ObjectiveGitFramework
//
// Created by Danny Greg on 27/03/2013.
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "git2.h"
@interface NSDate (GTTimeAdditions)
// Creates a new `NSDate` from the provided `git_time`.
//
// time - The `git_time` to base the returned date on.
// timeZone - The timezone used by the time passed in. Optional.
//
// Returns an `NSDate` object representing the passed in `time`.
+ (NSDate *)gt_dateFromGitTime:(git_time)time timeZone:(NSTimeZone **)timeZone;
// Converts the date to a `git_time`.
//
// timeZone - An `NSTimeZone` to describe the time offset. This is optional, if
// `nil` the default time zone will be used.
- (git_time)gt_gitTimeUsingTimeZone:(NSTimeZone *)timeZone;
@end
@interface NSTimeZone (GTTimeAdditions)
// The difference, in minutes, between the current default timezone and GMT.
@property (nonatomic, readonly) int gt_gitTimeOffset;
@end
|
// ... existing code ...
// Creates a new `NSDate` from the provided `git_time`.
//
// time - The `git_time` to base the returned date on.
// timeZone - The timezone used by the time passed in. Optional.
//
// Returns an `NSDate` object representing the passed in `time`.
+ (NSDate *)gt_dateFromGitTime:(git_time)time timeZone:(NSTimeZone **)timeZone;
// ... rest of the code ...
|
55664832cbd78db3f7ce115d2cddfbe489eb5c17
|
src/test/java/de/retest/recheck/ignore/SearchFilterFilesTest.java
|
src/test/java/de/retest/recheck/ignore/SearchFilterFilesTest.java
|
package de.retest.recheck.ignore;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.util.List;
import org.junit.jupiter.api.Test;
class SearchFilterFilesTest {
@Test
void getDefaultFilterFiles_should_get_all_filter_files_from_classpath() {
final List<File> defaultFilterFiles = SearchFilterFiles.getDefaultFilterFiles();
assertThat( defaultFilterFiles.stream().map( File::getName ) ).contains( "positioning.filter",
"visibility.filter" );
}
}
|
package de.retest.recheck.ignore;
import static de.retest.recheck.configuration.ProjectConfiguration.FILTER_FOLDER;
import static de.retest.recheck.configuration.ProjectConfiguration.RETEST_PROJECT_CONFIG_FOLDER;
import static de.retest.recheck.configuration.ProjectConfiguration.RETEST_PROJECT_ROOT;
import static de.retest.recheck.ignore.SearchFilterFiles.FILES_ENDING;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import de.retest.recheck.util.junit.jupiter.SystemProperty;
class SearchFilterFilesTest {
@Test
void getDefaultFilterFiles_should_get_all_filter_files_from_classpath() {
final List<File> defaultFilterFiles = SearchFilterFiles.getDefaultFilterFiles();
assertThat( defaultFilterFiles.stream().map( File::getName ) ).contains( "positioning.filter",
"visibility.filter" );
}
@Test
@SystemProperty( key = RETEST_PROJECT_ROOT )
void getProjectFilterFiles_should_only_get_filter_files( @TempDir final Path temp ) throws Exception {
final Path configFolder = temp.resolve( RETEST_PROJECT_ROOT );
Files.createDirectory( configFolder );
final Path retestFolder = configFolder.resolve( RETEST_PROJECT_CONFIG_FOLDER );
Files.createDirectory( retestFolder );
final Path filterFolder = retestFolder.resolve( FILTER_FOLDER );
Files.createDirectory( filterFolder );
Files.createTempFile( filterFolder, "random", ".ignore" ).toFile();
final File colorFilter = Files.createTempFile( filterFolder, "color", FILES_ENDING ).toFile();
final File webFontFilter = Files.createTempFile( filterFolder, "web-font", FILES_ENDING ).toFile();
System.setProperty( RETEST_PROJECT_ROOT, filterFolder.toString() );
final List<File> projectFilterFiles = SearchFilterFiles.getProjectFilterFiles();
assertThat( projectFilterFiles ).allMatch( file -> file.toString().endsWith( FILES_ENDING ) );
assertThat( projectFilterFiles.stream().map( File::getName ) ).contains( colorFilter.getName().toString(),
webFontFilter.getName().toString() );
}
}
|
Test the behavior of the method
|
Test the behavior of the method
|
Java
|
agpl-3.0
|
retest/recheck,retest/recheck
|
java
|
## Code Before:
package de.retest.recheck.ignore;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.util.List;
import org.junit.jupiter.api.Test;
class SearchFilterFilesTest {
@Test
void getDefaultFilterFiles_should_get_all_filter_files_from_classpath() {
final List<File> defaultFilterFiles = SearchFilterFiles.getDefaultFilterFiles();
assertThat( defaultFilterFiles.stream().map( File::getName ) ).contains( "positioning.filter",
"visibility.filter" );
}
}
## Instruction:
Test the behavior of the method
## Code After:
package de.retest.recheck.ignore;
import static de.retest.recheck.configuration.ProjectConfiguration.FILTER_FOLDER;
import static de.retest.recheck.configuration.ProjectConfiguration.RETEST_PROJECT_CONFIG_FOLDER;
import static de.retest.recheck.configuration.ProjectConfiguration.RETEST_PROJECT_ROOT;
import static de.retest.recheck.ignore.SearchFilterFiles.FILES_ENDING;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import de.retest.recheck.util.junit.jupiter.SystemProperty;
class SearchFilterFilesTest {
@Test
void getDefaultFilterFiles_should_get_all_filter_files_from_classpath() {
final List<File> defaultFilterFiles = SearchFilterFiles.getDefaultFilterFiles();
assertThat( defaultFilterFiles.stream().map( File::getName ) ).contains( "positioning.filter",
"visibility.filter" );
}
@Test
@SystemProperty( key = RETEST_PROJECT_ROOT )
void getProjectFilterFiles_should_only_get_filter_files( @TempDir final Path temp ) throws Exception {
final Path configFolder = temp.resolve( RETEST_PROJECT_ROOT );
Files.createDirectory( configFolder );
final Path retestFolder = configFolder.resolve( RETEST_PROJECT_CONFIG_FOLDER );
Files.createDirectory( retestFolder );
final Path filterFolder = retestFolder.resolve( FILTER_FOLDER );
Files.createDirectory( filterFolder );
Files.createTempFile( filterFolder, "random", ".ignore" ).toFile();
final File colorFilter = Files.createTempFile( filterFolder, "color", FILES_ENDING ).toFile();
final File webFontFilter = Files.createTempFile( filterFolder, "web-font", FILES_ENDING ).toFile();
System.setProperty( RETEST_PROJECT_ROOT, filterFolder.toString() );
final List<File> projectFilterFiles = SearchFilterFiles.getProjectFilterFiles();
assertThat( projectFilterFiles ).allMatch( file -> file.toString().endsWith( FILES_ENDING ) );
assertThat( projectFilterFiles.stream().map( File::getName ) ).contains( colorFilter.getName().toString(),
webFontFilter.getName().toString() );
}
}
|
# ... existing code ...
package de.retest.recheck.ignore;
import static de.retest.recheck.configuration.ProjectConfiguration.FILTER_FOLDER;
import static de.retest.recheck.configuration.ProjectConfiguration.RETEST_PROJECT_CONFIG_FOLDER;
import static de.retest.recheck.configuration.ProjectConfiguration.RETEST_PROJECT_ROOT;
import static de.retest.recheck.ignore.SearchFilterFiles.FILES_ENDING;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import de.retest.recheck.util.junit.jupiter.SystemProperty;
class SearchFilterFilesTest {
# ... modified code ...
assertThat( defaultFilterFiles.stream().map( File::getName ) ).contains( "positioning.filter",
"visibility.filter" );
}
@Test
@SystemProperty( key = RETEST_PROJECT_ROOT )
void getProjectFilterFiles_should_only_get_filter_files( @TempDir final Path temp ) throws Exception {
final Path configFolder = temp.resolve( RETEST_PROJECT_ROOT );
Files.createDirectory( configFolder );
final Path retestFolder = configFolder.resolve( RETEST_PROJECT_CONFIG_FOLDER );
Files.createDirectory( retestFolder );
final Path filterFolder = retestFolder.resolve( FILTER_FOLDER );
Files.createDirectory( filterFolder );
Files.createTempFile( filterFolder, "random", ".ignore" ).toFile();
final File colorFilter = Files.createTempFile( filterFolder, "color", FILES_ENDING ).toFile();
final File webFontFilter = Files.createTempFile( filterFolder, "web-font", FILES_ENDING ).toFile();
System.setProperty( RETEST_PROJECT_ROOT, filterFolder.toString() );
final List<File> projectFilterFiles = SearchFilterFiles.getProjectFilterFiles();
assertThat( projectFilterFiles ).allMatch( file -> file.toString().endsWith( FILES_ENDING ) );
assertThat( projectFilterFiles.stream().map( File::getName ) ).contains( colorFilter.getName().toString(),
webFontFilter.getName().toString() );
}
}
# ... rest of the code ...
|
f02ddd7cb50ed93c072fac6da5d215532c416bf5
|
container/src/main/java/com/streamsets/pipeline/runner/preview/PreviewPipeline.java
|
container/src/main/java/com/streamsets/pipeline/runner/preview/PreviewPipeline.java
|
/**
* (c) 2014 StreamSets, Inc. All rights reserved. May not
* be copied, modified, or distributed in whole or part without
* written consent of StreamSets, Inc.
*/
package com.streamsets.pipeline.runner.preview;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.runner.Pipeline;
import com.streamsets.pipeline.runner.PipelineRuntimeException;
import com.streamsets.pipeline.runner.StageOutput;
import com.streamsets.pipeline.validation.Issues;
import java.util.Collections;
import java.util.List;
public class PreviewPipeline {
private final Pipeline pipeline;
private final Issues issues;
public PreviewPipeline(Pipeline pipeline, Issues issues) {
this.issues = issues;
this.pipeline = pipeline;
}
@SuppressWarnings("unchecked")
public PreviewPipelineOutput run() throws StageException, PipelineRuntimeException{
return run(Collections.EMPTY_LIST);
}
public PreviewPipelineOutput run(List<StageOutput> stageOutputsToOverride)
throws StageException, PipelineRuntimeException{
pipeline.init();
pipeline.run(stageOutputsToOverride);
pipeline.destroy();
return new PreviewPipelineOutput(issues, pipeline.getRunner());
}
}
|
/**
* (c) 2014 StreamSets, Inc. All rights reserved. May not
* be copied, modified, or distributed in whole or part without
* written consent of StreamSets, Inc.
*/
package com.streamsets.pipeline.runner.preview;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.runner.Pipeline;
import com.streamsets.pipeline.runner.PipelineRuntimeException;
import com.streamsets.pipeline.runner.StageOutput;
import com.streamsets.pipeline.validation.Issues;
import java.util.Collections;
import java.util.List;
public class PreviewPipeline {
private final Pipeline pipeline;
private final Issues issues;
public PreviewPipeline(Pipeline pipeline, Issues issues) {
this.issues = issues;
this.pipeline = pipeline;
}
@SuppressWarnings("unchecked")
public PreviewPipelineOutput run() throws StageException, PipelineRuntimeException{
return run(Collections.EMPTY_LIST);
}
public PreviewPipelineOutput run(List<StageOutput> stageOutputsToOverride)
throws StageException, PipelineRuntimeException{
pipeline.init();
try {
pipeline.run(stageOutputsToOverride);
} finally {
pipeline.destroy();
}
return new PreviewPipelineOutput(issues, pipeline.getRunner());
}
}
|
Handle exception and clean up during preview
|
Handle exception and clean up during preview
|
Java
|
apache-2.0
|
kunickiaj/datacollector,kiritbasu/datacollector,WgStreamsets/datacollector,z123/datacollector,rockmkd/datacollector,rockmkd/datacollector,kunickiaj/datacollector,studanshu/datacollector,z123/datacollector,SandishKumarHN/datacollector,kiritbasu/datacollector,WgStreamsets/datacollector,rockmkd/datacollector,z123/datacollector,streamsets/datacollector,z123/datacollector,streamsets/datacollector,studanshu/datacollector,studanshu/datacollector,kunickiaj/datacollector,streamsets/datacollector,studanshu/datacollector,kiritbasu/datacollector,streamsets/datacollector,SandishKumarHN/datacollector,WgStreamsets/datacollector,SandishKumarHN/datacollector,SandishKumarHN/datacollector,z123/datacollector,kiritbasu/datacollector,rockmkd/datacollector,kunickiaj/datacollector,studanshu/datacollector,rockmkd/datacollector,WgStreamsets/datacollector,kunickiaj/datacollector,kiritbasu/datacollector,WgStreamsets/datacollector,SandishKumarHN/datacollector,streamsets/datacollector
|
java
|
## Code Before:
/**
* (c) 2014 StreamSets, Inc. All rights reserved. May not
* be copied, modified, or distributed in whole or part without
* written consent of StreamSets, Inc.
*/
package com.streamsets.pipeline.runner.preview;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.runner.Pipeline;
import com.streamsets.pipeline.runner.PipelineRuntimeException;
import com.streamsets.pipeline.runner.StageOutput;
import com.streamsets.pipeline.validation.Issues;
import java.util.Collections;
import java.util.List;
public class PreviewPipeline {
private final Pipeline pipeline;
private final Issues issues;
public PreviewPipeline(Pipeline pipeline, Issues issues) {
this.issues = issues;
this.pipeline = pipeline;
}
@SuppressWarnings("unchecked")
public PreviewPipelineOutput run() throws StageException, PipelineRuntimeException{
return run(Collections.EMPTY_LIST);
}
public PreviewPipelineOutput run(List<StageOutput> stageOutputsToOverride)
throws StageException, PipelineRuntimeException{
pipeline.init();
pipeline.run(stageOutputsToOverride);
pipeline.destroy();
return new PreviewPipelineOutput(issues, pipeline.getRunner());
}
}
## Instruction:
Handle exception and clean up during preview
## Code After:
/**
* (c) 2014 StreamSets, Inc. All rights reserved. May not
* be copied, modified, or distributed in whole or part without
* written consent of StreamSets, Inc.
*/
package com.streamsets.pipeline.runner.preview;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.runner.Pipeline;
import com.streamsets.pipeline.runner.PipelineRuntimeException;
import com.streamsets.pipeline.runner.StageOutput;
import com.streamsets.pipeline.validation.Issues;
import java.util.Collections;
import java.util.List;
public class PreviewPipeline {
private final Pipeline pipeline;
private final Issues issues;
public PreviewPipeline(Pipeline pipeline, Issues issues) {
this.issues = issues;
this.pipeline = pipeline;
}
@SuppressWarnings("unchecked")
public PreviewPipelineOutput run() throws StageException, PipelineRuntimeException{
return run(Collections.EMPTY_LIST);
}
public PreviewPipelineOutput run(List<StageOutput> stageOutputsToOverride)
throws StageException, PipelineRuntimeException{
pipeline.init();
try {
pipeline.run(stageOutputsToOverride);
} finally {
pipeline.destroy();
}
return new PreviewPipelineOutput(issues, pipeline.getRunner());
}
}
|
...
public PreviewPipelineOutput run(List<StageOutput> stageOutputsToOverride)
throws StageException, PipelineRuntimeException{
pipeline.init();
try {
pipeline.run(stageOutputsToOverride);
} finally {
pipeline.destroy();
}
return new PreviewPipelineOutput(issues, pipeline.getRunner());
}
...
|
d5ee134b691b9b21e6ea42e0afb8660b73e00fb5
|
remoting/subsystem/src/main/java/org/jboss/as/remoting/Protocol.java
|
remoting/subsystem/src/main/java/org/jboss/as/remoting/Protocol.java
|
package org.jboss.as.remoting;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.jboss.dmr.ModelNode;
/**
* Protocols that can be used for a remoting connection
*
* @author Stuart Douglas
*/
public enum Protocol {
REMOTE("remote"),
HTTP_REMOTING("http-remoting"),
HTTPS_REMOTING("https-remoting");
private static final Map<String, Protocol> MAP;
static {
final Map<String, Protocol> map = new HashMap<String, Protocol>();
for (Protocol value : values()) {
map.put(value.localName, value);
}
MAP = map;
}
public static Protocol forName(String localName) {
final Protocol value = localName != null ? MAP.get(localName.toLowerCase()) : null;
return value == null ? Protocol.valueOf(localName.toUpperCase(Locale.ENGLISH)) : value;
}
private final String localName;
Protocol(final String localName) {
this.localName = localName;
}
@Override
public String toString() {
return localName;
}
public ModelNode toModelNode() {
return new ModelNode().set(toString());
}
}
|
package org.jboss.as.remoting;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.jboss.dmr.ModelNode;
/**
* Protocols that can be used for a remoting connection
*
* @author Stuart Douglas
*/
public enum Protocol {
REMOTE("remote"),
HTTP_REMOTING("http-remoting"),
HTTPS_REMOTING("https-remoting");
private static final Map<String, Protocol> MAP;
static {
final Map<String, Protocol> map = new HashMap<String, Protocol>();
for (Protocol value : values()) {
map.put(value.localName, value);
}
MAP = map;
}
public static Protocol forName(String localName) {
final Protocol value = localName != null ? MAP.get(localName.toLowerCase(Locale.ENGLISH)) : null;
return value == null && localName != null ? Protocol.valueOf(localName.toUpperCase(Locale.ENGLISH)) : value;
}
private final String localName;
Protocol(final String localName) {
this.localName = localName;
}
@Override
public String toString() {
return localName;
}
public ModelNode toModelNode() {
return new ModelNode().set(toString());
}
}
|
Fix NPE warning; use locale for toLowerCase
|
Fix NPE warning; use locale for toLowerCase
|
Java
|
lgpl-2.1
|
jamezp/wildfly-core,aloubyansky/wildfly-core,JiriOndrusek/wildfly-core,darranl/wildfly-core,soul2zimate/wildfly-core,jfdenise/wildfly-core,jfdenise/wildfly-core,jamezp/wildfly-core,yersan/wildfly-core,luck3y/wildfly-core,jfdenise/wildfly-core,darranl/wildfly-core,bstansberry/wildfly-core,bstansberry/wildfly-core,yersan/wildfly-core,ivassile/wildfly-core,jamezp/wildfly-core,ivassile/wildfly-core,JiriOndrusek/wildfly-core,luck3y/wildfly-core,soul2zimate/wildfly-core,darranl/wildfly-core,soul2zimate/wildfly-core,JiriOndrusek/wildfly-core,ivassile/wildfly-core,bstansberry/wildfly-core,aloubyansky/wildfly-core,yersan/wildfly-core,luck3y/wildfly-core,aloubyansky/wildfly-core
|
java
|
## Code Before:
package org.jboss.as.remoting;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.jboss.dmr.ModelNode;
/**
* Protocols that can be used for a remoting connection
*
* @author Stuart Douglas
*/
public enum Protocol {
REMOTE("remote"),
HTTP_REMOTING("http-remoting"),
HTTPS_REMOTING("https-remoting");
private static final Map<String, Protocol> MAP;
static {
final Map<String, Protocol> map = new HashMap<String, Protocol>();
for (Protocol value : values()) {
map.put(value.localName, value);
}
MAP = map;
}
public static Protocol forName(String localName) {
final Protocol value = localName != null ? MAP.get(localName.toLowerCase()) : null;
return value == null ? Protocol.valueOf(localName.toUpperCase(Locale.ENGLISH)) : value;
}
private final String localName;
Protocol(final String localName) {
this.localName = localName;
}
@Override
public String toString() {
return localName;
}
public ModelNode toModelNode() {
return new ModelNode().set(toString());
}
}
## Instruction:
Fix NPE warning; use locale for toLowerCase
## Code After:
package org.jboss.as.remoting;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.jboss.dmr.ModelNode;
/**
* Protocols that can be used for a remoting connection
*
* @author Stuart Douglas
*/
public enum Protocol {
REMOTE("remote"),
HTTP_REMOTING("http-remoting"),
HTTPS_REMOTING("https-remoting");
private static final Map<String, Protocol> MAP;
static {
final Map<String, Protocol> map = new HashMap<String, Protocol>();
for (Protocol value : values()) {
map.put(value.localName, value);
}
MAP = map;
}
public static Protocol forName(String localName) {
final Protocol value = localName != null ? MAP.get(localName.toLowerCase(Locale.ENGLISH)) : null;
return value == null && localName != null ? Protocol.valueOf(localName.toUpperCase(Locale.ENGLISH)) : value;
}
private final String localName;
Protocol(final String localName) {
this.localName = localName;
}
@Override
public String toString() {
return localName;
}
public ModelNode toModelNode() {
return new ModelNode().set(toString());
}
}
|
...
}
public static Protocol forName(String localName) {
final Protocol value = localName != null ? MAP.get(localName.toLowerCase(Locale.ENGLISH)) : null;
return value == null && localName != null ? Protocol.valueOf(localName.toUpperCase(Locale.ENGLISH)) : value;
}
private final String localName;
...
|
225ae01e3147bbee5c03462dad7dcfef22297f51
|
elevator/utils/patterns.py
|
elevator/utils/patterns.py
|
from collections import Sequence
# Enums beautiful python implementation
# Used like this :
# Numbers = enum('ZERO', 'ONE', 'TWO')
# >>> Numbers.ZERO
# 0
# >>> Numbers.ONE
# 1
# Found here: http://stackoverflow.com/questions/36932/whats-the-best-way-to-implement-an-enum-in-python
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
class Singleton(type):
def __init__(cls, name, bases, dict):
super(Singleton, cls).__init__(name, bases, dict)
cls.instance = None
def __call__(cls, *args, **kw):
if cls.instance is None:
cls.instance = super(Singleton, cls).__call__(*args, **kw)
return cls.instance
def __del__(cls, *args, **kw):
cls.instance is None
def destructurate(container):
class DestructurationError(Exception):
pass
if isinstance(container, Sequence):
return container[0], container[1:]
else:
raise DestructurationError("Can't destructurate a non-sequence container")
return container
|
from collections import Sequence
# Enums beautiful python implementation
# Used like this :
# Numbers = enum('ZERO', 'ONE', 'TWO')
# >>> Numbers.ZERO
# 0
# >>> Numbers.ONE
# 1
# Found here: http://stackoverflow.com/questions/36932/whats-the-best-way-to-implement-an-enum-in-python
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
class Singleton(type):
def __init__(cls, name, bases, dict):
super(Singleton, cls).__init__(name, bases, dict)
cls.instance = None
def __call__(cls, *args, **kw):
if cls.instance is None:
cls.instance = super(Singleton, cls).__call__(*args, **kw)
return cls.instance
def __del__(cls, *args, **kw):
cls.instance is None
class DestructurationError(Exception):
pass
def destructurate(container):
try:
return container[0], container[1:]
except (KeyError, AttributeError):
raise DestructurationError("Can't destructurate a non-sequence container")
|
Update : try/except in destructurate greatly enhances performances on mass read/write
|
Update : try/except in destructurate greatly enhances performances on mass read/write
|
Python
|
mit
|
oleiade/Elevator
|
python
|
## Code Before:
from collections import Sequence
# Enums beautiful python implementation
# Used like this :
# Numbers = enum('ZERO', 'ONE', 'TWO')
# >>> Numbers.ZERO
# 0
# >>> Numbers.ONE
# 1
# Found here: http://stackoverflow.com/questions/36932/whats-the-best-way-to-implement-an-enum-in-python
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
class Singleton(type):
def __init__(cls, name, bases, dict):
super(Singleton, cls).__init__(name, bases, dict)
cls.instance = None
def __call__(cls, *args, **kw):
if cls.instance is None:
cls.instance = super(Singleton, cls).__call__(*args, **kw)
return cls.instance
def __del__(cls, *args, **kw):
cls.instance is None
def destructurate(container):
class DestructurationError(Exception):
pass
if isinstance(container, Sequence):
return container[0], container[1:]
else:
raise DestructurationError("Can't destructurate a non-sequence container")
return container
## Instruction:
Update : try/except in destructurate greatly enhances performances on mass read/write
## Code After:
from collections import Sequence
# Enums beautiful python implementation
# Used like this :
# Numbers = enum('ZERO', 'ONE', 'TWO')
# >>> Numbers.ZERO
# 0
# >>> Numbers.ONE
# 1
# Found here: http://stackoverflow.com/questions/36932/whats-the-best-way-to-implement-an-enum-in-python
def enum(*sequential, **named):
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
class Singleton(type):
def __init__(cls, name, bases, dict):
super(Singleton, cls).__init__(name, bases, dict)
cls.instance = None
def __call__(cls, *args, **kw):
if cls.instance is None:
cls.instance = super(Singleton, cls).__call__(*args, **kw)
return cls.instance
def __del__(cls, *args, **kw):
cls.instance is None
class DestructurationError(Exception):
pass
def destructurate(container):
try:
return container[0], container[1:]
except (KeyError, AttributeError):
raise DestructurationError("Can't destructurate a non-sequence container")
|
// ... existing code ...
cls.instance is None
class DestructurationError(Exception):
pass
def destructurate(container):
try:
return container[0], container[1:]
except (KeyError, AttributeError):
raise DestructurationError("Can't destructurate a non-sequence container")
// ... rest of the code ...
|
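A few usage lines for `destructurate()` after the switch to try/except; note that a plain dict lookup is what trips the `KeyError` branch and surfaces as `DestructurationError`:

```python
# Assumes destructurate and DestructurationError from the module above.
head, tail = destructurate([1, 2, 3])     # -> 1, [2, 3]
first, rest = destructurate('queue')      # -> 'q', 'ueue'

try:
    destructurate({'a': 1})               # dict: container[0] raises KeyError
except DestructurationError:
    print("rejected: can't destructurate a non-sequence container")
```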
8bfe6e791228ccbc3143f3a8747c68d2e8b0cbb5
|
runtests.py
|
runtests.py
|
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
Fix running tests on lower Django versions
|
Fix running tests on lower Django versions
|
Python
|
apache-2.0
|
AdrianLC/django-parler-rest,edoburu/django-parler-rest
|
python
|
## Code Before:
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
## Instruction:
Fix running tests on lower Django versions
## Code After:
from django.conf import settings
from django.core.management import execute_from_command_line
import django
import os
import sys
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
def runtests():
argv = sys.argv[:1] + ['test', 'testproj'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
// ... existing code ...
if not settings.configured:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
if django.VERSION >= (1,7):
django.setup()
module_root = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, module_root)
// ... rest of the code ...
|
0ad6cb338bbf10c48049d5649b5cd41eab0ed8d1
|
prawcore/sessions.py
|
prawcore/sessions.py
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
Add optional authorizer parameter to session class and function.
|
Add optional authorizer parameter to session class and function.
|
Python
|
bsd-2-clause
|
praw-dev/prawcore
|
python
|
## Code Before:
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
## Instruction:
Add optional authorizer parameter to session class and function.
## Code After:
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
// ... existing code ...
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
// ... modified code ...
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
// ... rest of the code ...
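A short usage sketch for the keyword argument introduced above; the authorizer value is a stand-in, since the rest of the prawcore API is not reproduced in this record:

from prawcore.sessions import session

authorizer = ...  # stand-in for an Authorizer instance built elsewhere
# Both session(...) and Session(...) simply store it on Session.authorizer.
with session(authorizer=authorizer) as sess:
    assert sess.authorizer is authorizer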
|
9cfdb35fb1f645eda99d28085b093ee36dd14625
|
processors/closure_compiler.py
|
processors/closure_compiler.py
|
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
|
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
|
Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies
|
Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies
|
Python
|
bsd-2-clause
|
potatolondon/assetpipe
|
python
|
## Code Before:
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
## Instruction:
Use the ECMASCRIPT5_STRICT flag otherwise AngularJS dies
## Code After:
from collections import OrderedDict
import os
import StringIO
from django.conf import settings
from django.utils.encoding import smart_str
from ..base import Processor
from django.core.exceptions import ImproperlyConfigured
class ClosureCompiler(Processor):
def modify_expected_output_filenames(self, filenames):
return filenames
def process(self, inputs):
if not hasattr(settings, "CLOSURE_COMPILER_BINARY"):
raise ImproperlyConfigured("Please set the CLOSURE_COMPILER_BINARY setting")
from subprocess import Popen, PIPE
outputs = OrderedDict()
compressor = settings.CLOSURE_COMPILER_BINARY
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
output, error = cmd.communicate(smart_str(contents.read()))
file_out = StringIO.StringIO()
file_out.write(output)
file_out.seek(0)
outputs[filename] = file_out
except Exception, e:
raise ValueError("Failed to execute Java VM or closure. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"CLOSURE_COMPILER_BINARY in your settings correctly.\n"
"Error was: %s" % e)
return outputs
|
// ... existing code ...
try:
for filename, contents in inputs.items():
cmd = Popen([
'java', '-jar', compressor, "--language_in", "ECMASCRIPT5_STRICT"],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True
)
// ... rest of the code ...
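The same flag can be exercised outside the Django processor with a plain subprocess call; in this sketch the jar path and the JavaScript snippet are placeholders:

from subprocess import PIPE, Popen

COMPILER_JAR = "/path/to/closure-compiler.jar"  # placeholder path
source = "'use strict'; var greet = function (name) { return 'hi ' + name; };"
cmd = Popen(
    ["java", "-jar", COMPILER_JAR, "--language_in", "ECMASCRIPT5_STRICT"],
    stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True,
)
minified, errors = cmd.communicate(source)
print(minified or errors)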
|
e55b28bd92f29985b495b0b90b912f74fae3901f
|
transpiler/java/com/google/j2cl/ast/Statement.java
|
transpiler/java/com/google/j2cl/ast/Statement.java
|
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.ast;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.j2cl.ast.annotations.Visitable;
import com.google.j2cl.ast.processors.common.Processor;
import com.google.j2cl.common.SourcePosition;
/** A base class for Statement. */
@Visitable
public abstract class Statement extends Node implements HasSourcePosition, Cloneable<Statement> {
// unknown by default.
private SourcePosition sourcePosition;
public Statement(SourcePosition sourcePosition) {
setSourcePosition(sourcePosition);
}
@Override
public SourcePosition getSourcePosition() {
return sourcePosition;
}
public void setSourcePosition(SourcePosition sourcePosition) {
this.sourcePosition = checkNotNull(sourcePosition);
}
@Override
public abstract Statement clone();
@Override
public Node accept(Processor processor) {
return Visitor_Statement.visit(processor, this);
}
}
|
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.ast;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.j2cl.ast.annotations.Visitable;
import com.google.j2cl.ast.processors.common.Processor;
import com.google.j2cl.common.SourcePosition;
/** A base class for Statement. */
@Visitable
public abstract class Statement extends Node implements HasSourcePosition, Cloneable<Statement> {
// unknown by default.
private SourcePosition sourcePosition;
public Statement(SourcePosition sourcePosition) {
setSourcePosition(sourcePosition);
}
@Override
public final SourcePosition getSourcePosition() {
return sourcePosition;
}
public final void setSourcePosition(SourcePosition sourcePosition) {
this.sourcePosition = checkNotNull(sourcePosition);
}
@Override
public abstract Statement clone();
@Override
public Node accept(Processor processor) {
return Visitor_Statement.visit(processor, this);
}
}
|
Make set/getSourcePosition final on Node.
|
Make set/getSourcePosition final on Node.
PiperOrigin-RevId: 304549770
|
Java
|
apache-2.0
|
google/j2cl,google/j2cl,google/j2cl,google/j2cl,google/j2cl
|
java
|
## Code Before:
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.ast;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.j2cl.ast.annotations.Visitable;
import com.google.j2cl.ast.processors.common.Processor;
import com.google.j2cl.common.SourcePosition;
/** A base class for Statement. */
@Visitable
public abstract class Statement extends Node implements HasSourcePosition, Cloneable<Statement> {
// unknown by default.
private SourcePosition sourcePosition;
public Statement(SourcePosition sourcePosition) {
setSourcePosition(sourcePosition);
}
@Override
public SourcePosition getSourcePosition() {
return sourcePosition;
}
public void setSourcePosition(SourcePosition sourcePosition) {
this.sourcePosition = checkNotNull(sourcePosition);
}
@Override
public abstract Statement clone();
@Override
public Node accept(Processor processor) {
return Visitor_Statement.visit(processor, this);
}
}
## Instruction:
Make set/getSourcePosition final on Node.
PiperOrigin-RevId: 304549770
## Code After:
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.j2cl.ast;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.j2cl.ast.annotations.Visitable;
import com.google.j2cl.ast.processors.common.Processor;
import com.google.j2cl.common.SourcePosition;
/** A base class for Statement. */
@Visitable
public abstract class Statement extends Node implements HasSourcePosition, Cloneable<Statement> {
// unknown by default.
private SourcePosition sourcePosition;
public Statement(SourcePosition sourcePosition) {
setSourcePosition(sourcePosition);
}
@Override
public final SourcePosition getSourcePosition() {
return sourcePosition;
}
public final void setSourcePosition(SourcePosition sourcePosition) {
this.sourcePosition = checkNotNull(sourcePosition);
}
@Override
public abstract Statement clone();
@Override
public Node accept(Processor processor) {
return Visitor_Statement.visit(processor, this);
}
}
|
...
}
@Override
public final SourcePosition getSourcePosition() {
return sourcePosition;
}
public final void setSourcePosition(SourcePosition sourcePosition) {
this.sourcePosition = checkNotNull(sourcePosition);
}
...
|
58811f1f6a4204a1c59d197daa9fb5fb7f6b25de
|
src/dynamic_graph/sot/dynamics/solver.py
|
src/dynamic_graph/sot/dynamics/solver.py
|
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
|
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
Add a proxy method push in Solver -> Solver.sot.push.
|
Add a proxy method push in Solver -> Solver.sot.push.
|
Python
|
bsd-2-clause
|
stack-of-tasks/sot-dynamic-pinocchio,stack-of-tasks/sot-dynamic-pinocchio,stack-of-tasks/sot-dynamic-pinocchio
|
python
|
## Code Before:
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
## Instruction:
Add a proxy method push in Solver -> Solver.sot.push.
## Code After:
from dynamic_graph import plug
from dynamic_graph.sot.core import SOT
class Solver:
robot = None
sot = None
def __init__(self, robot):
self.robot = robot
self.sot = SOT('solver')
self.sot.signal('damping').value = 1e-6
self.sot.setNumberDofs(self.robot.dimension)
if robot.device:
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
|
...
plug(self.sot.signal('control'), robot.device.signal('control'))
plug(self.robot.device.state,
self.robot.dynamic.position)
def push(self, taskName):
"""
Proxy method to push a task in the sot
"""
self.sot.push(taskName)
...
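The change above is a thin forwarding method: Solver.push simply delegates to Solver.sot.push. A self-contained sketch of that pattern with stand-in classes, since the real SOT entity and robot setup are not reproduced here:

class FakeSOT(object):
    """Stand-in for dynamic_graph.sot.core.SOT, for illustration only."""
    def __init__(self):
        self.tasks = []
    def push(self, task_name):
        self.tasks.append(task_name)

class SolverSketch(object):
    def __init__(self):
        self.sot = FakeSOT()
    def push(self, task_name):
        """Proxy method to push a task in the sot."""
        self.sot.push(task_name)

solver = SolverSketch()
solver.push('taskRightHand')   # callers never touch solver.sot directly
print(solver.sot.tasks)        # ['taskRightHand']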
|
987e3b3387124c9eee7b0d69647fe2eeba40b70d
|
snippets/list_holidays.py
|
snippets/list_holidays.py
|
import pandas as pd
from datetime import date
import holidays
def sanitize_holiday_name(name):
new_name = [c for c in name if c.isalpha() or c.isdigit() or c == ' ']
new_name = "".join(new_name).lower().replace(" ", "_")
return new_name
def process_holidays(df):
# Create a date object
user_date = date(df['year'], df['month'], df['day'])
# Get US holidays for this year
holidays_dates = holidays.US(years=df['year'])
for holiday_date, name in holidays_dates.iteritems():
# if 'observed' in name:
# pass
# Compute difference in days
days = (holiday_date - user_date).days
# Clean holiday name
name = sanitize_holiday_name(name)
# Add the computed days to holiday into our DataFrame
df['days_to_' + name] = days
return df
def main():
df = pd.DataFrame()
df['year'] = pd.Series(range(2010, 2015))
df['day'] = pd.Series(range(11, 27, 3))
df['month'] = pd.Series(range(2, 12, 2))
print df.apply(process_holidays, axis=1)
if __name__ == '__main__':
main()
|
import pandas as pd
from datetime import date
import holidays
def sanitize_holiday_name(name):
new_name = [c for c in name if c.isalpha() or c.isdigit() or c == ' ']
new_name = "".join(new_name).lower().replace(" ", "_")
return new_name
def process_holidays(df):
# Create a date object
user_date = date(
df['year_account_created'],
df['month_account_created'],
df['day_account_created']
)
# Get US holidays for this year
holidays_dates = holidays.US(years=df['year'])
for holiday_date, name in holidays_dates.iteritems():
# if 'observed' in name:
# pass
# Compute difference in days
days = (holiday_date - user_date).days
# Clean holiday name
name = sanitize_holiday_name(name)
# Add the computed days to holiday into our DataFrame
df['days_to_' + name] = days
return df
def main():
path = '../datasets/processed/'
train_users = pd.read_csv(path + 'processed_train_users.csv')
train_users = train_users.head(500)
test_users = pd.read_csv(path + 'processed_train_users.csv')
test_users = test_users.head(500)
train_users = train_users.apply(process_holidays, axis=1)
print train_users.columns
if __name__ == '__main__':
main()
|
Update with real train users
|
Update with real train users
|
Python
|
mit
|
davidgasquez/kaggle-airbnb
|
python
|
## Code Before:
import pandas as pd
from datetime import date
import holidays
def sanitize_holiday_name(name):
new_name = [c for c in name if c.isalpha() or c.isdigit() or c == ' ']
new_name = "".join(new_name).lower().replace(" ", "_")
return new_name
def process_holidays(df):
# Create a date object
user_date = date(df['year'], df['month'], df['day'])
# Get US holidays for this year
holidays_dates = holidays.US(years=df['year'])
for holiday_date, name in holidays_dates.iteritems():
# if 'observed' in name:
# pass
# Compute difference in days
days = (holiday_date - user_date).days
# Clean holiday name
name = sanitize_holiday_name(name)
# Add the computed days to holiday into our DataFrame
df['days_to_' + name] = days
return df
def main():
df = pd.DataFrame()
df['year'] = pd.Series(range(2010, 2015))
df['day'] = pd.Series(range(11, 27, 3))
df['month'] = pd.Series(range(2, 12, 2))
print df.apply(process_holidays, axis=1)
if __name__ == '__main__':
main()
## Instruction:
Update with real train users
## Code After:
import pandas as pd
from datetime import date
import holidays
def sanitize_holiday_name(name):
new_name = [c for c in name if c.isalpha() or c.isdigit() or c == ' ']
new_name = "".join(new_name).lower().replace(" ", "_")
return new_name
def process_holidays(df):
# Create a date object
user_date = date(
df['year_account_created'],
df['month_account_created'],
df['day_account_created']
)
# Get US holidays for this year
holidays_dates = holidays.US(years=df['year'])
for holiday_date, name in holidays_dates.iteritems():
# if 'observed' in name:
# pass
# Compute difference in days
days = (holiday_date - user_date).days
# Clean holiday name
name = sanitize_holiday_name(name)
# Add the computed days to holiday into our DataFrame
df['days_to_' + name] = days
return df
def main():
path = '../datasets/processed/'
train_users = pd.read_csv(path + 'processed_train_users.csv')
train_users = train_users.head(500)
test_users = pd.read_csv(path + 'processed_train_users.csv')
test_users = test_users.head(500)
train_users = train_users.apply(process_holidays, axis=1)
print train_users.columns
if __name__ == '__main__':
main()
|
// ... existing code ...
def process_holidays(df):
# Create a date object
user_date = date(
df['year_account_created'],
df['month_account_created'],
df['day_account_created']
)
# Get US holidays for this year
holidays_dates = holidays.US(years=df['year'])
// ... modified code ...
def main():
path = '../datasets/processed/'
train_users = pd.read_csv(path + 'processed_train_users.csv')
train_users = train_users.head(500)
test_users = pd.read_csv(path + 'processed_train_users.csv')
test_users = test_users.head(500)
train_users = train_users.apply(process_holidays, axis=1)
print train_users.columns
if __name__ == '__main__':
// ... rest of the code ...
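The feature being built above is the signed number of days from the account-creation date to each US holiday of a given year. A self-contained sketch with one hypothetical row; the column names mirror the processed dataset, the values are invented:

from datetime import date
import holidays

row = {'year_account_created': 2014, 'month_account_created': 7, 'day_account_created': 1}
user_date = date(row['year_account_created'],
                 row['month_account_created'],
                 row['day_account_created'])
# Positive offsets are holidays still ahead of the creation date, negative ones are past.
for holiday_date, name in sorted(holidays.US(years=2014).items()):
    print(name, (holiday_date - user_date).days)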
|
bb77b9554108c6a9739dd058a12484d15f10d3a2
|
candidates/views/helpers.py
|
candidates/views/helpers.py
|
from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import HttpResponseRedirect
from slugify import slugify
from ..election_specific import AREA_POST_DATA
from ..models import (
PopItPerson, membership_covers_date
)
def get_redirect_to_post(election, post_data):
short_post_label = AREA_POST_DATA.shorten_post_label(
election, post_data['label']
)
return HttpResponseRedirect(
reverse(
'constituency',
kwargs={
'election': election,
'post_id': post_data['id'],
'ignored_slug': slugify(short_post_label),
}
)
)
def get_people_from_memberships(election_data, memberships):
current_candidates = set()
past_candidates = set()
for membership in memberships:
if not membership.get('role') == election_data['candidate_membership_role']:
continue
person = PopItPerson.create_from_dict(membership['person_id'])
if membership_covers_date(
membership,
election_data['election_date']
):
current_candidates.add(person)
else:
for election, election_data in settings.ELECTIONS_BY_DATE:
if not election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
election_data['election_date'],
):
past_candidates.add(person)
return current_candidates, past_candidates
|
from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import HttpResponseRedirect
from slugify import slugify
from ..election_specific import AREA_POST_DATA
from ..models import (
PopItPerson, membership_covers_date
)
def get_redirect_to_post(election, post_data):
short_post_label = AREA_POST_DATA.shorten_post_label(
election, post_data['label']
)
return HttpResponseRedirect(
reverse(
'constituency',
kwargs={
'election': election,
'post_id': post_data['id'],
'ignored_slug': slugify(short_post_label),
}
)
)
def get_people_from_memberships(election_data, memberships):
current_candidates = set()
past_candidates = set()
for membership in memberships:
if not membership.get('role') == election_data['candidate_membership_role']:
continue
person = PopItPerson.create_from_dict(membership['person_id'])
if membership_covers_date(
membership,
election_data['election_date']
):
current_candidates.add(person)
else:
for other_election, other_election_data in settings.ELECTIONS_BY_DATE:
if not other_election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
other_election_data['election_date'],
):
past_candidates.add(person)
return current_candidates, past_candidates
|
Fix a stupid bug in get_people_from_memberships
|
Fix a stupid bug in get_people_from_memberships
The rebinding of the function's election_data parameter was breaking
the listing of candidates for a post.
|
Python
|
agpl-3.0
|
openstate/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,datamade/yournextmp-popit,openstate/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,DemocracyClub/yournextrepresentative
|
python
|
## Code Before:
from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import HttpResponseRedirect
from slugify import slugify
from ..election_specific import AREA_POST_DATA
from ..models import (
PopItPerson, membership_covers_date
)
def get_redirect_to_post(election, post_data):
short_post_label = AREA_POST_DATA.shorten_post_label(
election, post_data['label']
)
return HttpResponseRedirect(
reverse(
'constituency',
kwargs={
'election': election,
'post_id': post_data['id'],
'ignored_slug': slugify(short_post_label),
}
)
)
def get_people_from_memberships(election_data, memberships):
current_candidates = set()
past_candidates = set()
for membership in memberships:
if not membership.get('role') == election_data['candidate_membership_role']:
continue
person = PopItPerson.create_from_dict(membership['person_id'])
if membership_covers_date(
membership,
election_data['election_date']
):
current_candidates.add(person)
else:
for election, election_data in settings.ELECTIONS_BY_DATE:
if not election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
election_data['election_date'],
):
past_candidates.add(person)
return current_candidates, past_candidates
## Instruction:
Fix a stupid bug in get_people_from_memberships
The rebinding of the function's election_data parameter was breaking
the listing of candidates for a post.
## Code After:
from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import HttpResponseRedirect
from slugify import slugify
from ..election_specific import AREA_POST_DATA
from ..models import (
PopItPerson, membership_covers_date
)
def get_redirect_to_post(election, post_data):
short_post_label = AREA_POST_DATA.shorten_post_label(
election, post_data['label']
)
return HttpResponseRedirect(
reverse(
'constituency',
kwargs={
'election': election,
'post_id': post_data['id'],
'ignored_slug': slugify(short_post_label),
}
)
)
def get_people_from_memberships(election_data, memberships):
current_candidates = set()
past_candidates = set()
for membership in memberships:
if not membership.get('role') == election_data['candidate_membership_role']:
continue
person = PopItPerson.create_from_dict(membership['person_id'])
if membership_covers_date(
membership,
election_data['election_date']
):
current_candidates.add(person)
else:
for other_election, other_election_data in settings.ELECTIONS_BY_DATE:
if not other_election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
other_election_data['election_date'],
):
past_candidates.add(person)
return current_candidates, past_candidates
|
// ... existing code ...
):
current_candidates.add(person)
else:
for other_election, other_election_data in settings.ELECTIONS_BY_DATE:
if not other_election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
other_election_data['election_date'],
):
past_candidates.add(person)
// ... rest of the code ...
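The bug fixed above is plain name shadowing: the inner for-loop rebound election_data, so the later membership_covers_date checks no longer saw the function's own argument. A self-contained illustration of the same failure mode with generic names (nothing here comes from the project):

def broken(config, items):
    picked = []
    for item in items:
        for config in item['alternatives']:   # rebinds the parameter
            pass
        if config['enabled']:                 # now tests the last alternative
            picked.append(item)
    return picked

def fixed(config, items):
    picked = []
    for item in items:
        for other_config in item['alternatives']:
            pass
        if config['enabled']:                 # still the caller's config
            picked.append(item)
    return picked

items = [{'alternatives': [{'enabled': False}]}]
print(broken({'enabled': True}, items))  # [] -- the item is wrongly dropped
print(fixed({'enabled': True}, items))   # the item is kept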
|
f15e2f9457fdd6ccc32ac2d8bd8d2c361f219e97
|
tests/test_bib.py
|
tests/test_bib.py
|
import pytest
from bibtextomd.bib import main, reorder
|
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
        names = 'Name, Hyphen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
Add several tests for the name highlighting function
|
Add several tests for the name highlighting function
|
Python
|
mit
|
bryanwweber/bibtextomd
|
python
|
## Code Before:
import pytest
from bibtextomd.bib import main, reorder
## Instruction:
Add several tests for the name highlighting function
## Code After:
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
        names = 'Name, Hyphen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
|
# ... existing code ...
import pytest
from bibtextomd.bib import main, reorder
class TestReorder():
def test_single_author_good(self):
names = 'Author, First A.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**'
def test_two_authors_good(self):
names = 'Author, First A. and Name, Second N.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author** and S.N. Name'
def test_three_authors_good(self):
names = 'Author, First A. and Name, Second N. and Name, Unicode C.'
n = reorder(names, 'F.A. Author')
assert n == '**F.A. Author**, S.N. Name, and U.C. Name'
def test_unicode_good(self):
names = 'Namé, Unicode C.'
n = reorder(names, 'U.C. Namé')
assert n == '**U.C. Namé**'
def test_missing_name(self):
names = 'Author, First A.'
with pytest.warns(UserWarning):
reorder(names, 'Missing Author')
def test_no_highlighted_name(self):
names = 'Author, First A.'
n = reorder(names, None)
assert n == 'F.A. Author'
def test_hyphenated_name(self):
        names = 'Name, Hyphen-Ated'
n = reorder(names, None)
assert n == 'H.A. Name'
# ... rest of the code ...
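test_missing_name above leans on the pytest.warns context manager; a self-contained sketch of that idiom with a stub standing in for bibtextomd's reorder (the stub's behaviour is assumed, only the pytest usage matters here):

import warnings
import pytest

def reorder_stub(names, highlighted):
    """Illustrative stand-in: warn when the highlighted author is absent."""
    if highlighted is not None and highlighted.split()[-1] not in names:
        warnings.warn("could not find %s" % highlighted, UserWarning)
    return names

def test_warns_on_missing_author():
    with pytest.warns(UserWarning):
        reorder_stub('Author, First A.', 'Missing Person')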
|
bf753d3f58d9a141fe35741931c31504b835e97a
|
jutils/src/main/java/com/valencia/jutils/jvm/Platform.java
|
jutils/src/main/java/com/valencia/jutils/jvm/Platform.java
|
package com.valencia.jutils.jvm;
/**
*
*/
/**
* Used for introspecting the current OS platform.
*
* @author [email protected]
*/
public enum Platform {
WINDOWS, LINUX, MAC_OS, AIX,
;
public static Platform get() {
String os = System.getProperty("os.name");
if (os.toLowerCase().contains("win")) {
return Platform.WINDOWS;
}
if (os.equals("Mac OS X")) {
return Platform.MAC_OS;
}
return Platform.LINUX;
}
}
|
package com.valencia.jutils.jvm;
/**
*
*/
/**
* Used for introspecting the current OS platform.
*
* @author [email protected]
*/
public enum Platform {
WINDOWS, LINUX, MAC_OS, AIX,
/**
* Used to indicate any platform.
*/
ALL,
;
public static Platform get() {
String os = System.getProperty("os.name");
if (os.toLowerCase().contains("win")) {
return Platform.WINDOWS;
}
if (os.equals("Mac OS X")) {
return Platform.MAC_OS;
}
return Platform.LINUX;
}
}
|
Add any platform enum constant.
|
Add any platform enum constant.
|
Java
|
apache-2.0
|
gee4vee/jutils
|
java
|
## Code Before:
package com.valencia.jutils.jvm;
/**
*
*/
/**
* Used for introspecting the current OS platform.
*
* @author [email protected]
*/
public enum Platform {
WINDOWS, LINUX, MAC_OS, AIX,
;
public static Platform get() {
String os = System.getProperty("os.name");
if (os.toLowerCase().contains("win")) {
return Platform.WINDOWS;
}
if (os.equals("Mac OS X")) {
return Platform.MAC_OS;
}
return Platform.LINUX;
}
}
## Instruction:
Add any platform enum constant.
## Code After:
package com.valencia.jutils.jvm;
/**
*
*/
/**
* Used for introspecting the current OS platform.
*
* @author [email protected]
*/
public enum Platform {
WINDOWS, LINUX, MAC_OS, AIX,
/**
* Used to indicate any platform.
*/
ALL,
;
public static Platform get() {
String os = System.getProperty("os.name");
if (os.toLowerCase().contains("win")) {
return Platform.WINDOWS;
}
if (os.equals("Mac OS X")) {
return Platform.MAC_OS;
}
return Platform.LINUX;
}
}
|
// ... existing code ...
*/
public enum Platform {
WINDOWS, LINUX, MAC_OS, AIX,
/**
* Used to indicate any platform.
*/
ALL,
;
public static Platform get() {
// ... rest of the code ...
|
bbf3d68b9566a826f404aa1ab3da198d765dca58
|
contacts/rules.py
|
contacts/rules.py
|
ALLOWED_FIELDS = [
'name',
'first_name',
'last_name',
'phone_number',
'photo',
'email',
'twitter'
]
|
ALLOWED_FIELDS = [
'name',
'phone_number',
'first_name',
'last_name',
'phone_number',
'photo',
'email',
'twitter'
]
|
Add 'phone_number' field to ALLOWED_FIELDS.
|
Add 'phone_number' field to ALLOWED_FIELDS.
|
Python
|
mit
|
heimann/contacts
|
python
|
## Code Before:
ALLOWED_FIELDS = [
'name',
'first_name',
'last_name',
'phone_number',
'photo',
'email',
'twitter'
]
## Instruction:
Add 'phone_number' field to ALLOWED_FIELDS.
## Code After:
ALLOWED_FIELDS = [
'name',
'phone_number',
'first_name',
'last_name',
'phone_number',
'photo',
'email',
'twitter'
]
|
// ... existing code ...
ALLOWED_FIELDS = [
'name',
'phone_number',
'first_name',
'last_name',
'phone_number',
// ... rest of the code ...
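The second 'phone_number' entry above is redundant but harmless for membership tests. How such a whitelist is normally consumed is not shown in the record; a sketch of the usual pattern, with an invented payload and helper name:

ALLOWED_FIELDS = ['name', 'phone_number', 'first_name', 'last_name', 'photo', 'email', 'twitter']

def clean_contact(payload, allowed=ALLOWED_FIELDS):
    """Keep only whitelisted keys from an incoming contact payload."""
    return {key: value for key, value in payload.items() if key in allowed}

print(clean_contact({'name': 'Ada', 'phone_number': '555-0100', 'is_admin': True}))
# {'name': 'Ada', 'phone_number': '555-0100'}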
|
d83ed858dab0991e4829a7f249260ae1f1140b41
|
rave/main.py
|
rave/main.py
|
import rave.events
import rave.modularity
import rave.backends
import rave.resources
import rave.rendering
def init_game(game):
rave.events.emit('game.init', game)
with game.env:
rave.modularity.load_all()
rave.backends.select_all()
def run_game(game):
running = True
# Stop the event loop when a stop event was caught.
def stop(event):
nonlocal running
running = False
game.events.hook('game.stop', stop)
rave.events.emit('game.start', game)
with game.env:
# Typical handle events -> update game state -> render loop.
while running:
with game.active_lock:
# Suspend main loop while lock is active: useful for when the OS requests an application suspend.
pass
rave.backends.handle_events(game)
if game.mixer:
game.mixer.render(None)
if game.window:
game.window.render(None)
|
import rave.events
import rave.modularity
import rave.backends
import rave.resources
import rave.rendering
def init_game(game):
rave.modularity.load_all()
rave.events.emit('game.init', game)
with game.env:
rave.backends.select_all()
def run_game(game):
running = True
# Stop the event loop when a stop event was caught.
def stop(event):
nonlocal running
running = False
game.events.hook('game.stop', stop)
rave.events.emit('game.start', game)
with game.env:
# Typical handle events -> update game state -> render loop.
while running:
with game.active_lock:
# Suspend main loop while lock is active: useful for when the OS requests an application suspend.
pass
rave.backends.handle_events(game)
if game.mixer:
game.mixer.render(None)
if game.window:
game.window.render(None)
|
Load modules in engine context.
|
core: Load modules in engine context.
|
Python
|
bsd-2-clause
|
rave-engine/rave
|
python
|
## Code Before:
import rave.events
import rave.modularity
import rave.backends
import rave.resources
import rave.rendering
def init_game(game):
rave.events.emit('game.init', game)
with game.env:
rave.modularity.load_all()
rave.backends.select_all()
def run_game(game):
running = True
# Stop the event loop when a stop event was caught.
def stop(event):
nonlocal running
running = False
game.events.hook('game.stop', stop)
rave.events.emit('game.start', game)
with game.env:
# Typical handle events -> update game state -> render loop.
while running:
with game.active_lock:
# Suspend main loop while lock is active: useful for when the OS requests an application suspend.
pass
rave.backends.handle_events(game)
if game.mixer:
game.mixer.render(None)
if game.window:
game.window.render(None)
## Instruction:
core: Load modules in engine context.
## Code After:
import rave.events
import rave.modularity
import rave.backends
import rave.resources
import rave.rendering
def init_game(game):
rave.modularity.load_all()
rave.events.emit('game.init', game)
with game.env:
rave.backends.select_all()
def run_game(game):
running = True
# Stop the event loop when a stop event was caught.
def stop(event):
nonlocal running
running = False
game.events.hook('game.stop', stop)
rave.events.emit('game.start', game)
with game.env:
# Typical handle events -> update game state -> render loop.
while running:
with game.active_lock:
# Suspend main loop while lock is active: useful for when the OS requests an application suspend.
pass
rave.backends.handle_events(game)
if game.mixer:
game.mixer.render(None)
if game.window:
game.window.render(None)
|
// ... existing code ...
def init_game(game):
rave.modularity.load_all()
rave.events.emit('game.init', game)
with game.env:
rave.backends.select_all()
// ... rest of the code ...
|
e6b6aa2803a27703dfebe82161baacaff879dbc3
|
build-logic/src/main/kotlin/local/java-library.gradle.kts
|
build-logic/src/main/kotlin/local/java-library.gradle.kts
|
package local
plugins {
id("local.java-base")
`java-library`
id("net.ltgt.errorprone")
id("net.ltgt.nullaway")
}
java.sourceCompatibility = JavaVersion.VERSION_1_8
if (JavaVersion.current().isJava9Compatible) {
tasks.withType<JavaCompile>().configureEach {
options.release.set(java.targetCompatibility.majorVersion.toInt())
}
}
dependencies {
errorprone(project.the<VersionCatalogsExtension>().named("libs").findBundle("errorprone").orElseThrow())
errorproneJavac("com.google.errorprone:javac:9+181-r4173-1")
}
tasks {
withType<JavaCompile>().configureEach {
options.compilerArgs.addAll(listOf("-Werror", "-Xlint:all,-processing"))
}
javadoc {
(options as CoreJavadocOptions).addBooleanOption("Xdoclint:all,-missing", true)
if (JavaVersion.current().isJava9Compatible) {
(options as CoreJavadocOptions).addBooleanOption("html5", true)
}
}
}
|
package local
plugins {
id("local.java-base")
`java-library`
id("net.ltgt.errorprone")
id("net.ltgt.nullaway")
}
tasks.withType<JavaCompile>().configureEach {
options.release.set(8)
}
dependencies {
errorprone(project.the<VersionCatalogsExtension>().named("libs").findBundle("errorprone").orElseThrow())
errorproneJavac("com.google.errorprone:javac:9+181-r4173-1")
}
tasks {
withType<JavaCompile>().configureEach {
options.compilerArgs.addAll(listOf("-Werror", "-Xlint:all,-processing"))
}
javadoc {
(options as CoreJavadocOptions).addBooleanOption("Xdoclint:all,-missing", true)
if (JavaVersion.current().isJava9Compatible) {
(options as CoreJavadocOptions).addBooleanOption("html5", true)
}
}
}
|
Remove code that supports running the build with JDK 8
|
Remove code that supports running the build with JDK 8
Addition of version catalog with use of java.util.Optional
already required JDK 11 in practice anyway.
Tests can still be run with JDK 8 though.
|
Kotlin
|
apache-2.0
|
tbroyer/gradle-incap-helper,tbroyer/gradle-incap-helper
|
kotlin
|
## Code Before:
package local
plugins {
id("local.java-base")
`java-library`
id("net.ltgt.errorprone")
id("net.ltgt.nullaway")
}
java.sourceCompatibility = JavaVersion.VERSION_1_8
if (JavaVersion.current().isJava9Compatible) {
tasks.withType<JavaCompile>().configureEach {
options.release.set(java.targetCompatibility.majorVersion.toInt())
}
}
dependencies {
errorprone(project.the<VersionCatalogsExtension>().named("libs").findBundle("errorprone").orElseThrow())
errorproneJavac("com.google.errorprone:javac:9+181-r4173-1")
}
tasks {
withType<JavaCompile>().configureEach {
options.compilerArgs.addAll(listOf("-Werror", "-Xlint:all,-processing"))
}
javadoc {
(options as CoreJavadocOptions).addBooleanOption("Xdoclint:all,-missing", true)
if (JavaVersion.current().isJava9Compatible) {
(options as CoreJavadocOptions).addBooleanOption("html5", true)
}
}
}
## Instruction:
Remove code that supports running the build with JDK 8
Addition of version catalog with use of java.util.Optional
already required JDK 11 in practice anyway.
Tests can still be run with JDK 8 though.
## Code After:
package local
plugins {
id("local.java-base")
`java-library`
id("net.ltgt.errorprone")
id("net.ltgt.nullaway")
}
tasks.withType<JavaCompile>().configureEach {
options.release.set(8)
}
dependencies {
errorprone(project.the<VersionCatalogsExtension>().named("libs").findBundle("errorprone").orElseThrow())
errorproneJavac("com.google.errorprone:javac:9+181-r4173-1")
}
tasks {
withType<JavaCompile>().configureEach {
options.compilerArgs.addAll(listOf("-Werror", "-Xlint:all,-processing"))
}
javadoc {
(options as CoreJavadocOptions).addBooleanOption("Xdoclint:all,-missing", true)
if (JavaVersion.current().isJava9Compatible) {
(options as CoreJavadocOptions).addBooleanOption("html5", true)
}
}
}
|
...
id("net.ltgt.nullaway")
}
tasks.withType<JavaCompile>().configureEach {
options.release.set(8)
}
dependencies {
...
|
6153f18bda4dcf3601df91b60787453af1517b78
|
falmer/auth/types.py
|
falmer/auth/types.py
|
import graphene
from django.contrib.auth.models import Permission as DjangoPermission
from graphene_django import DjangoObjectType
from . import models
class ClientUser(DjangoObjectType):
name = graphene.String()
has_cms_access = graphene.Boolean()
user_id = graphene.Int()
permissions = graphene.List(graphene.Int)
class Meta:
model = models.FalmerUser
fields = (
'id',
'name',
)
def resolve_name(self, info):
return self.get_full_name()
def resolve_user_id(self, info):
return self.pk
# this is a quick hack until we work on permissions etc
def resolve_has_cms_access(self, info):
return self.has_perm('wagtailadmin.access_admin')
def resolve_permissions(self, info):
return self.get_permissions()
class Permission(DjangoObjectType):
content_type = graphene.String()
class Meta:
model = DjangoPermission
fields = (
'content_type',
)
def resolve_content_type(self, info):
return self.content_type.app_label
|
import graphene
from django.contrib.auth.models import Permission as DjangoPermission
from graphene_django import DjangoObjectType
from . import models
class ClientUser(DjangoObjectType):
name = graphene.String()
has_cms_access = graphene.Boolean()
user_id = graphene.Int()
permissions = graphene.List(graphene.Int)
class Meta:
model = models.FalmerUser
fields = (
'id',
'name',
)
def resolve_name(self, info):
return self.get_full_name()
def resolve_user_id(self, info):
return self.pk
# this is a quick hack until we work on permissions etc
def resolve_has_cms_access(self, info):
return self.has_perm('wagtailadmin.access_admin')
def resolve_permissions(self, info):
return self.get_permissions()
class Permission(DjangoObjectType):
content_type = graphene.String()
class Meta:
model = DjangoPermission
fields = (
'id',
'name',
'codename',
'content_type',
)
def resolve_content_type(self, info):
return self.content_type.app_label
|
Add additional fields to permission type
|
Add additional fields to permission type
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
python
|
## Code Before:
import graphene
from django.contrib.auth.models import Permission as DjangoPermission
from graphene_django import DjangoObjectType
from . import models
class ClientUser(DjangoObjectType):
name = graphene.String()
has_cms_access = graphene.Boolean()
user_id = graphene.Int()
permissions = graphene.List(graphene.Int)
class Meta:
model = models.FalmerUser
fields = (
'id',
'name',
)
def resolve_name(self, info):
return self.get_full_name()
def resolve_user_id(self, info):
return self.pk
# this is a quick hack until we work on permissions etc
def resolve_has_cms_access(self, info):
return self.has_perm('wagtailadmin.access_admin')
def resolve_permissions(self, info):
return self.get_permissions()
class Permission(DjangoObjectType):
content_type = graphene.String()
class Meta:
model = DjangoPermission
fields = (
'content_type',
)
def resolve_content_type(self, info):
return self.content_type.app_label
## Instruction:
Add additional fields to permission type
## Code After:
import graphene
from django.contrib.auth.models import Permission as DjangoPermission
from graphene_django import DjangoObjectType
from . import models
class ClientUser(DjangoObjectType):
name = graphene.String()
has_cms_access = graphene.Boolean()
user_id = graphene.Int()
permissions = graphene.List(graphene.Int)
class Meta:
model = models.FalmerUser
fields = (
'id',
'name',
)
def resolve_name(self, info):
return self.get_full_name()
def resolve_user_id(self, info):
return self.pk
# this is a quick hack until we work on permissions etc
def resolve_has_cms_access(self, info):
return self.has_perm('wagtailadmin.access_admin')
def resolve_permissions(self, info):
return self.get_permissions()
class Permission(DjangoObjectType):
content_type = graphene.String()
class Meta:
model = DjangoPermission
fields = (
'id',
'name',
'codename',
'content_type',
)
def resolve_content_type(self, info):
return self.content_type.app_label
|
...
class Meta:
model = DjangoPermission
fields = (
'id',
'name',
'codename',
'content_type',
)
...
|
07710f97883452cbe472ae9735700773aa59f492
|
falmer/content/models/selection_grid.py
|
falmer/content/models/selection_grid.py
|
from wagtail.core import blocks
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
class Meta:
icon = 'item'
class SelectionGridPage(Page):
parent_page_types = []
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
Add description to selectiongrid items
|
Add description to selectiongrid items
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
python
|
## Code Before:
from wagtail.core import blocks
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
class Meta:
icon = 'item'
class SelectionGridPage(Page):
parent_page_types = []
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
## Instruction:
Add description to selectiongrid items
## Code After:
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
...
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
...
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
...
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
...
|
a9c0f4fad89d31ef835100ee96bb7640ea635d57
|
src/main/java/seedu/address/logic/parser/ListCommandParser.java
|
src/main/java/seedu/address/logic/parser/ListCommandParser.java
|
package seedu.address.logic.parser;
import static seedu.address.logic.parser.CliSyntax.KEYWORDS_ARGS_FORMAT_LIST;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import seedu.address.logic.commands.Command;
import seedu.address.logic.commands.ListCommand;
/**
* Parses input arguments and creates a new ListCommand object
*/
public class ListCommandParser {
private ListCommandParser() {
}
/**
* Parses the given {@code String} of arguments in the context of the FindCommand
* and returns an FindCommand object for execution.
*/
public static Command parse(String args) {
if (args == null) {
return new ListCommand();
}
// if not match, show all the unfinished task
final Matcher matcher = KEYWORDS_ARGS_FORMAT_LIST.matcher(args.trim());
if (!matcher.matches()) {
return new ListCommand();
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split("\\s+");
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new ListCommand(keywordSet);
}
}
|
package seedu.address.logic.parser;
import static seedu.address.logic.parser.CliSyntax.KEYWORDS_ARGS_FORMAT_LIST;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import seedu.address.logic.commands.Command;
import seedu.address.logic.commands.ListCommand;
/**
* Parses input arguments and creates a new ListCommand object
*/
public class ListCommandParser {
private ListCommandParser() {
}
private static final String LIST_SEPARATOR = "\\s+";
/**
* Parses the given {@code String} of arguments in the context of the FindCommand
* and returns an FindCommand object for execution.
*/
public static Command parse(String args) {
if (args == null) {
return new ListCommand();
}
// if not match, show all the unfinished task
final Matcher matcher = KEYWORDS_ARGS_FORMAT_LIST.matcher(args.trim());
if (!matcher.matches()) {
return new ListCommand();
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split(LIST_SEPARATOR);
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new ListCommand(keywordSet);
}
}
|
Refactor - remove magic string
|
[V0.4][Logic] Refactor - remove magic string
|
Java
|
mit
|
CS2103JAN2017-W10-B1/main,CS2103JAN2017-W10-B1/main
|
java
|
## Code Before:
package seedu.address.logic.parser;
import static seedu.address.logic.parser.CliSyntax.KEYWORDS_ARGS_FORMAT_LIST;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import seedu.address.logic.commands.Command;
import seedu.address.logic.commands.ListCommand;
/**
* Parses input arguments and creates a new ListCommand object
*/
public class ListCommandParser {
private ListCommandParser() {
}
/**
* Parses the given {@code String} of arguments in the context of the FindCommand
* and returns an FindCommand object for execution.
*/
public static Command parse(String args) {
if (args == null) {
return new ListCommand();
}
// if not match, show all the unfinished task
final Matcher matcher = KEYWORDS_ARGS_FORMAT_LIST.matcher(args.trim());
if (!matcher.matches()) {
return new ListCommand();
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split("\\s+");
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new ListCommand(keywordSet);
}
}
## Instruction:
[V0.4][Logic] Refactor - remove magic string
## Code After:
package seedu.address.logic.parser;
import static seedu.address.logic.parser.CliSyntax.KEYWORDS_ARGS_FORMAT_LIST;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import seedu.address.logic.commands.Command;
import seedu.address.logic.commands.ListCommand;
/**
* Parses input arguments and creates a new ListCommand object
*/
public class ListCommandParser {
private ListCommandParser() {
}
private static final String LIST_SEPARATOR = "\\s+";
/**
* Parses the given {@code String} of arguments in the context of the FindCommand
* and returns an FindCommand object for execution.
*/
public static Command parse(String args) {
if (args == null) {
return new ListCommand();
}
// if not match, show all the unfinished task
final Matcher matcher = KEYWORDS_ARGS_FORMAT_LIST.matcher(args.trim());
if (!matcher.matches()) {
return new ListCommand();
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split(LIST_SEPARATOR);
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new ListCommand(keywordSet);
}
}
|
// ... existing code ...
private ListCommandParser() {
}
private static final String LIST_SEPARATOR = "\\s+";
/**
* Parses the given {@code String} of arguments in the context of the FindCommand
* and returns an FindCommand object for execution.
// ... modified code ...
}
// keywords delimited by whitespace
final String[] keywords = matcher.group("keywords").split(LIST_SEPARATOR);
final Set<String> keywordSet = new HashSet<>(Arrays.asList(keywords));
return new ListCommand(keywordSet);
}
// ... rest of the code ...
|
69b262f502bbc48204db70815476aa256bd7db6e
|
rmgpy/tools/canteraTest.py
|
rmgpy/tools/canteraTest.py
|
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import *
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
|
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""
reactorType='IdealGasReactor'
molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac)
|
Add unit test for CanteraCondition that tests that the repr() function works
|
Add unit test for CanteraCondition that tests that the repr() function works
|
Python
|
mit
|
nyee/RMG-Py,nickvandewiele/RMG-Py,chatelak/RMG-Py,nickvandewiele/RMG-Py,chatelak/RMG-Py,nyee/RMG-Py,pierrelb/RMG-Py,pierrelb/RMG-Py
|
python
|
## Code Before:
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import *
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
## Instruction:
Add unit test for CanteraCondition that tests that the repr() function works
## Code After:
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""
reactorType='IdealGasReactor'
molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac)
|
// ... existing code ...
import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
// ... modified code ...
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""
reactorType='IdealGasReactor'
molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac)
// ... rest of the code ...
|
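A side note on the testing pattern used in testRepr above: it relies on the Python convention that repr() returns source text which eval() can execute to rebuild an equivalent object. A minimal, self-contained sketch of that round-trip, using a made-up Condition class rather than anything from RMG-Py:
class Condition(object):
    """Toy container whose repr() is valid Python that rebuilds the object."""
    def __init__(self, T0, P0):
        self.T0 = T0
        self.P0 = P0
    def __repr__(self):
        return "Condition(T0={0!r}, P0={1!r})".format(self.T0, self.P0)
original = Condition(T0=(1500, 'K'), P0=(3, 'atm'))
rebuilt = eval(repr(original))  # round-trip through the repr string
assert rebuilt.T0 == original.T0 and rebuilt.P0 == original.P0
The real test compares value_si through Quantity rather than raw tuples, which serves the same purpose for unit-carrying values.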
421dbe962dae44cad7aa734a397cb16fe9b1632f
|
reactive/datanode.py
|
reactive/datanode.py
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
Update charms.hadoop reference to follow convention
|
Update charms.hadoop reference to follow convention
|
Python
|
apache-2.0
|
johnsca/layer-apache-hadoop-datanode,juju-solutions/layer-apache-hadoop-datanode
|
python
|
## Code Before:
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
## Instruction:
Update charms.hadoop reference to follow convention
## Code After:
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
utils.install_ssh_key('hdfs', namenode.ssh_key())
utils.update_kv_hosts(namenode.hosts_map())
utils.manage_etc_hosts()
hdfs.start_datanode()
hadoop.open_ports('datanode')
set_state('datanode.started')
@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
hadoop = get_hadoop_base()
hdfs = HDFS(hadoop)
hdfs.stop_datanode()
hadoop.close_ports('datanode')
remove_state('datanode.started')
|
# ... existing code ...
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils
# ... rest of the code ...
|
74041faf3aa2411e431b27561f85bc5bb36f3b85
|
src/main/java/com/amee/platform/science/BaseDate.java
|
src/main/java/com/amee/platform/science/BaseDate.java
|
package com.amee.platform.science;
import org.joda.time.DateTime;
import java.util.Date;
public abstract class BaseDate extends java.util.Date {
protected String dateStr;
public BaseDate(long time) {
setTime(time);
setDefaultDateStr();
}
public BaseDate(String dateStr) {
super();
if (dateStr != null) {
setTime(parseStr(dateStr));
this.dateStr = dateStr;
} else {
setTime(defaultDate());
setDefaultDateStr();
}
}
protected abstract long parseStr(String dateStr);
protected abstract void setDefaultDateStr();
protected abstract long defaultDate();
public String toString() {
return dateStr;
}
public DateTime toDateTime() {
return new DateTime(this.getTime());
}
}
|
package com.amee.platform.science;
import org.joda.time.DateTime;
import java.util.Date;
public abstract class BaseDate extends java.util.Date {
protected String dateStr;
public BaseDate(long time) {
setTime(time);
setDefaultDateStr();
}
public BaseDate(String dateStr) {
super();
if (dateStr != null) {
setTime(parseStr(dateStr));
this.dateStr = dateStr;
} else {
setTime(defaultDate());
setDefaultDateStr();
}
}
protected abstract long parseStr(String dateStr);
protected abstract void setDefaultDateStr();
protected abstract long defaultDate();
public String toString() {
return dateStr;
}
public DateTime toDateTime() {
return new DateTime(this.getTime());
}
public Date toDate() {
return new DateTime(this.getTime()).toDate();
}
}
|
Revert "Removed unused method. PL-1321."
|
Revert "Removed unused method. PL-1321."
This reverts commit a43684f4c0f4753d6906ca6f7e1cf9409bc24488.
|
Java
|
mit
|
OpenAMEE/amee.platform.api
|
java
|
## Code Before:
package com.amee.platform.science;
import org.joda.time.DateTime;
import java.util.Date;
public abstract class BaseDate extends java.util.Date {
protected String dateStr;
public BaseDate(long time) {
setTime(time);
setDefaultDateStr();
}
public BaseDate(String dateStr) {
super();
if (dateStr != null) {
setTime(parseStr(dateStr));
this.dateStr = dateStr;
} else {
setTime(defaultDate());
setDefaultDateStr();
}
}
protected abstract long parseStr(String dateStr);
protected abstract void setDefaultDateStr();
protected abstract long defaultDate();
public String toString() {
return dateStr;
}
public DateTime toDateTime() {
return new DateTime(this.getTime());
}
}
## Instruction:
Revert "Removed unused method. PL-1321."
This reverts commit a43684f4c0f4753d6906ca6f7e1cf9409bc24488.
## Code After:
package com.amee.platform.science;
import org.joda.time.DateTime;
import java.util.Date;
public abstract class BaseDate extends java.util.Date {
protected String dateStr;
public BaseDate(long time) {
setTime(time);
setDefaultDateStr();
}
public BaseDate(String dateStr) {
super();
if (dateStr != null) {
setTime(parseStr(dateStr));
this.dateStr = dateStr;
} else {
setTime(defaultDate());
setDefaultDateStr();
}
}
protected abstract long parseStr(String dateStr);
protected abstract void setDefaultDateStr();
protected abstract long defaultDate();
public String toString() {
return dateStr;
}
public DateTime toDateTime() {
return new DateTime(this.getTime());
}
public Date toDate() {
return new DateTime(this.getTime()).toDate();
}
}
|
// ... existing code ...
public DateTime toDateTime() {
return new DateTime(this.getTime());
}
public Date toDate() {
return new DateTime(this.getTime()).toDate();
}
}
// ... rest of the code ...
|
f38eb25fe13320297baad173c8e6d6ac7cfb9542
|
spacy/tests/tokens/test_vec.py
|
spacy/tests/tokens/test_vec.py
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
|
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
Fix test for word vector
|
Fix test for word vector
|
Python
|
mit
|
oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,explosion/spaCy,explosion/spaCy,raphael0202/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,explosion/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,honnibal/spaCy,raphael0202/spaCy,explosion/spaCy,raphael0202/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,banglakit/spaCy,raphael0202/spaCy,banglakit/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,spacy-io/spaCy,spacy-io/spaCy
|
python
|
## Code Before:
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert 0.08 >= hype.vector[0] > 0.07
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert 0.08 >= hype.vector[0] > 0.07
## Instruction:
Fix test for word vector
## Code After:
from __future__ import unicode_literals
from spacy.en import English
import pytest
@pytest.mark.models
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
|
# ... existing code ...
def test_vec(EN):
hype = EN.vocab['hype']
assert hype.orth_ == 'hype'
assert -0.7 >= hype.vector[0] > -0.8
@pytest.mark.models
# ... modified code ...
def test_capitalized(EN):
hype = EN.vocab['Hype']
assert hype.orth_ == 'Hype'
assert -0.7 >= hype.vector[0] > -0.8
# ... rest of the code ...
|
6c472cf7892335853430dcb59b84e6ba74c29042
|
rxpalette-kotlin/src/main/kotlin/io/sweers/rxpalette/RxPalette.kt
|
rxpalette-kotlin/src/main/kotlin/io/sweers/rxpalette/RxPalette.kt
|
package io.sweers.rxpalette
import android.graphics.Bitmap
import android.support.v7.graphics.Palette
import rx.Observable
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.generate(bitmap: Bitmap): Observable<Palette> = RxPalette.generate(bitmap)
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.Builder.generate(): Observable<Palette> = RxPalette.generate(this)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.generateAsync(bitmap: Bitmap): Observable<Palette> = RxPalette.generateAsync(bitmap)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.Builder.generateAsync(): Observable<Palette> = RxPalette.generateAsync(this)
|
package io.sweers.rxpalette
import android.graphics.Bitmap
import android.support.v7.graphics.Palette
import rx.Observable
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.asObservable(bitmap: Bitmap): Observable<Palette> = RxPalette.generate(bitmap)
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.Builder.asObservable(): Observable<Palette> = RxPalette.generate(this)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.generateAsync(bitmap: Bitmap): Observable<Palette> = RxPalette.generateAsync(bitmap)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.Builder.generateAsync(): Observable<Palette> = RxPalette.generateAsync(this)
|
Change Kotlin API to use asObservable
|
Change Kotlin API to use asObservable
Function extensions don't replace existing signatures
|
Kotlin
|
apache-2.0
|
hzsweers/RxPalette,hzsweers/RxPalette,hzsweers/RxPalette
|
kotlin
|
## Code Before:
package io.sweers.rxpalette
import android.graphics.Bitmap
import android.support.v7.graphics.Palette
import rx.Observable
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.generate(bitmap: Bitmap): Observable<Palette> = RxPalette.generate(bitmap)
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.Builder.generate(): Observable<Palette> = RxPalette.generate(this)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.generateAsync(bitmap: Bitmap): Observable<Palette> = RxPalette.generateAsync(bitmap)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.Builder.generateAsync(): Observable<Palette> = RxPalette.generateAsync(this)
## Instruction:
Change Kotlin API to use asObservable
Function extensions don't replace existing signatures
## Code After:
package io.sweers.rxpalette
import android.graphics.Bitmap
import android.support.v7.graphics.Palette
import rx.Observable
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.asObservable(bitmap: Bitmap): Observable<Palette> = RxPalette.generate(bitmap)
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.Builder.asObservable(): Observable<Palette> = RxPalette.generate(this)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.generateAsync(bitmap: Bitmap): Observable<Palette> = RxPalette.generateAsync(bitmap)
/**
* Generate the `Palette` asynchronously.
*/
public inline fun Palette.Builder.generateAsync(): Observable<Palette> = RxPalette.generateAsync(this)
|
# ... existing code ...
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.asObservable(bitmap: Bitmap): Observable<Palette> = RxPalette.generate(bitmap)
/**
* Generate the `Palette` synchronously.
*/
public inline fun Palette.Builder.asObservable(): Observable<Palette> = RxPalette.generate(this)
/**
* Generate the `Palette` asynchronously.
# ... rest of the code ...
|
2c71d9589947e1e1f3e75e907b1c28beccc7b268
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='[email protected]',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='[email protected]',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
Use pypandoc to convert README.md to RST for long_description
|
Use pypandoc to convert README.md to RST for long_description
|
Python
|
mit
|
mikemill/rq_retry_scheduler
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='[email protected]',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
## Instruction:
Use pypandoc to convert README.md to RST for long_description
## Code After:
from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='[email protected]',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
// ... existing code ...
from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
// ... modified code ...
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='[email protected]',
packages=find_packages(exclude=['*tests*']),
// ... rest of the code ...
|
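For context on the change above: PyPI has long rendered long_description as reStructuredText, so projects that keep their README in Markdown often convert it at packaging time; wrapping the pypandoc import in try/except keeps the dependency optional, falling back to an empty description when pypandoc (and the underlying pandoc binary) is absent instead of breaking installation. A quick way to eyeball the conversion locally, assuming pypandoc is installed, is a sketch like:
import pypandoc
rst = pypandoc.convert_file('README.md', 'rst')  # same call setup.py makes
print('\n'.join(rst.splitlines()[:10]))          # spot-check the first lines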
da097ed41010961cc0814d55d8784787f3ea8a63
|
skimage/util/arraypad.py
|
skimage/util/arraypad.py
|
from __future__ import division, absolute_import, print_function
from numpy import pad as numpy_pad
def pad(array, pad_width, mode, **kwargs):
return numpy_pad(array, pad_width, mode, **kwargs)
# Pull function info / docs from NumPy
pad.__doc__ = numpy_pad.__doc__
|
from __future__ import division, absolute_import, print_function
import numpy as np
def pad(array, pad_width, mode, **kwargs):
return np.pad(array, pad_width, mode, **kwargs)
# Pull function info / docs from NumPy
pad.__doc__ = np.pad.__doc__
|
Change import structure for doctests
|
Change import structure for doctests
|
Python
|
bsd-3-clause
|
rjeli/scikit-image,paalge/scikit-image,rjeli/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,rjeli/scikit-image,paalge/scikit-image
|
python
|
## Code Before:
from __future__ import division, absolute_import, print_function
from numpy import pad as numpy_pad
def pad(array, pad_width, mode, **kwargs):
return numpy_pad(array, pad_width, mode, **kwargs)
# Pull function info / docs from NumPy
pad.__doc__ = numpy_pad.__doc__
## Instruction:
Change import structure for doctests
## Code After:
from __future__ import division, absolute_import, print_function
import numpy as np
def pad(array, pad_width, mode, **kwargs):
return np.pad(array, pad_width, mode, **kwargs)
# Pull function info / docs from NumPy
pad.__doc__ = np.pad.__doc__
|
# ... existing code ...
from __future__ import division, absolute_import, print_function
import numpy as np
def pad(array, pad_width, mode, **kwargs):
return np.pad(array, pad_width, mode, **kwargs)
# Pull function info / docs from NumPy
pad.__doc__ = np.pad.__doc__
# ... rest of the code ...
|
30f0b99a2233c6009a3c41d9b22e3f946c40c3cf
|
kitchen/urls.py
|
kitchen/urls.py
|
"""Root URL routing"""
from django.conf.urls.defaults import patterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from kitchen.dashboard import api
import kitchen.settings as settings
urlpatterns = patterns('',
(r'^$', 'kitchen.dashboard.views.list'),
(r'^virt/$', 'kitchen.dashboard.views.virt'),
(r'^graph/$', 'kitchen.dashboard.views.graph'),
(r'^plugins/((?P<plugin_type>(virt|v|list|l))/)?(?P<name>[\w\-\_]+)/(?P<method>\w+)/?$', 'kitchen.dashboard.views.plugins'),
(r'^api/nodes/(?P<name>\w+)$', api.get_node),
(r'^api/nodes', api.get_nodes),
(r'^api/roles', api.get_roles),
(r'^404', TemplateView.as_view(template_name="404.html")),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
"""Root URL routing"""
from django.conf.urls.defaults import patterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from kitchen.dashboard import api
import kitchen.settings as settings
if settings.SHOW_LIST_VIEW:
root_view = 'kitchen.dashboard.views.list'
elif settings.SHOW_VIRT_VIEW:
root_view = 'kitchen.dashboard.views.virt'
elif settings.SHOW_GRAPH_VIEW:
root_view = 'kitchen.dashboard.views.graph'
else:
raise Exception("No views enabled! Please edit settings.py.")
urlpatterns = patterns('',
(r'^$', root_view),
(r'^virt/$', 'kitchen.dashboard.views.virt'),
(r'^graph/$', 'kitchen.dashboard.views.graph'),
(r'^plugins/((?P<plugin_type>(virt|v|list|l))/)?(?P<name>[\w\-\_]+)/(?P<method>\w+)/?$', 'kitchen.dashboard.views.plugins'),
(r'^api/nodes/(?P<name>\w+)$', api.get_node),
(r'^api/nodes', api.get_nodes),
(r'^api/roles', api.get_roles),
(r'^404', TemplateView.as_view(template_name="404.html")),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Set root view depending on what views are enabled
|
Set root view depending on what views are enabled
|
Python
|
apache-2.0
|
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
|
python
|
## Code Before:
"""Root URL routing"""
from django.conf.urls.defaults import patterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from kitchen.dashboard import api
import kitchen.settings as settings
urlpatterns = patterns('',
(r'^$', 'kitchen.dashboard.views.list'),
(r'^virt/$', 'kitchen.dashboard.views.virt'),
(r'^graph/$', 'kitchen.dashboard.views.graph'),
(r'^plugins/((?P<plugin_type>(virt|v|list|l))/)?(?P<name>[\w\-\_]+)/(?P<method>\w+)/?$', 'kitchen.dashboard.views.plugins'),
(r'^api/nodes/(?P<name>\w+)$', api.get_node),
(r'^api/nodes', api.get_nodes),
(r'^api/roles', api.get_roles),
(r'^404', TemplateView.as_view(template_name="404.html")),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
## Instruction:
Set root view depending on what views are enabled
## Code After:
"""Root URL routing"""
from django.conf.urls.defaults import patterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from kitchen.dashboard import api
import kitchen.settings as settings
if settings.SHOW_LIST_VIEW:
root_view = 'kitchen.dashboard.views.list'
elif settings.SHOW_VIRT_VIEW:
root_view = 'kitchen.dashboard.views.virt'
elif settings.SHOW_GRAPH_VIEW:
root_view = 'kitchen.dashboard.views.graph'
else:
raise Exception("No views enabled! Please edit settings.py.")
urlpatterns = patterns('',
(r'^$', root_view),
(r'^virt/$', 'kitchen.dashboard.views.virt'),
(r'^graph/$', 'kitchen.dashboard.views.graph'),
(r'^plugins/((?P<plugin_type>(virt|v|list|l))/)?(?P<name>[\w\-\_]+)/(?P<method>\w+)/?$', 'kitchen.dashboard.views.plugins'),
(r'^api/nodes/(?P<name>\w+)$', api.get_node),
(r'^api/nodes', api.get_nodes),
(r'^api/roles', api.get_roles),
(r'^404', TemplateView.as_view(template_name="404.html")),
)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
# ... existing code ...
import kitchen.settings as settings
if settings.SHOW_LIST_VIEW:
root_view = 'kitchen.dashboard.views.list'
elif settings.SHOW_VIRT_VIEW:
root_view = 'kitchen.dashboard.views.virt'
elif settings.SHOW_GRAPH_VIEW:
root_view = 'kitchen.dashboard.views.graph'
else:
raise Exception("No views enabled! Please edit settings.py.")
urlpatterns = patterns('',
(r'^$', root_view),
(r'^virt/$', 'kitchen.dashboard.views.virt'),
(r'^graph/$', 'kitchen.dashboard.views.graph'),
(r'^plugins/((?P<plugin_type>(virt|v|list|l))/)?(?P<name>[\w\-\_]+)/(?P<method>\w+)/?$', 'kitchen.dashboard.views.plugins'),
# ... rest of the code ...
|
1d88ea54d1f4ce63893b906a5b79faa4dd25243f
|
grow/commands/convert.py
|
grow/commands/convert.py
|
from grow.pods import pods
from grow.pods import storage
from grow.conversion import *
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
from grow.pods import pods
from grow.pods import storage
from grow.conversion import content_locale_split
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
Adjust import to fix build with PyInstaller.
|
Adjust import to fix build with PyInstaller.
|
Python
|
mit
|
grow/pygrow,grow/pygrow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/grow
|
python
|
## Code Before:
from grow.pods import pods
from grow.pods import storage
from grow.conversion import *
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
## Instruction:
Adjust import to fix build with PyInstaller.
## Code After:
from grow.pods import pods
from grow.pods import storage
from grow.conversion import content_locale_split
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
...
from grow.pods import pods
from grow.pods import storage
from grow.conversion import content_locale_split
import click
import os
...
|
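The reasoning behind this one-line change: PyInstaller builds its module graph from the names that appear literally in import statements, and `from grow.conversion import *` never names content_locale_split, so the frozen binary could ship without it; importing the module explicitly makes the dependency statically visible. (When a dynamic import genuinely cannot be avoided, PyInstaller's hidden-import mechanism is the usual escape hatch.) A generic, standard-library illustration of the difference between the two import styles, not taken from the grow codebase:
from json import *   # wildcard: only re-exported names appear, never a submodule name
import json.decoder  # explicit: the json.decoder dependency is spelled out for static tools
assert loads('{"a": 1}') == {"a": 1}
assert json.decoder.JSONDecoder().decode('{"a": 1}') == {"a": 1}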
13fcfe538becc6a0e72d2c25a26e42f3f395c52c
|
src/com/growingwiththeweb/algorithms/interviewquestions/dividewithoutdivide/Program.java
|
src/com/growingwiththeweb/algorithms/interviewquestions/dividewithoutdivide/Program.java
|
package com.growingwiththeweb.algorithms.interviewquestions.dividewithoutdivide;
public class Program {
public static void main(String[] args) {
assert divide(10,4) == 2;
assert divide(11,4) == 2;
assert divide(12,4) == 3;
assert divide(13,4) == 3;
assert divide(130,3) == 43;
System.out.println("Tests passed");
}
public static double divide(double a, double b) {
int sign = 1;
if (a < 0) {
a = -a;
sign = -sign;
}
if (b < 0) {
b = -b;
sign = -sign;
}
double result = 0;
while (a >= 0) {
a -= b;
result++;
}
return (result - 1) * sign;
}
}
|
package com.growingwiththeweb.algorithms.interviewquestions.dividewithoutdivide;
import java.lang.ArithmeticException;
public class Program {
public static void main(String[] args) {
assert divide(10, 4) == 2;
assert divide(11, 4) == 2;
assert divide(12, 4) == 3;
assert divide(13, 4) == 3;
assert divide(130,3 ) == 43;
boolean threw = false;
try {
divide(1, 0);
} catch (ArithmeticException ex) {
threw = true;
}
assert threw;
System.out.println("Tests passed");
}
public static double divide(double a, double b) {
if (b == 0) {
throw new ArithmeticException(
"Division by 0 is undefined: " + a + "/" + b);
}
int sign = 1;
if (a < 0) {
a = -a;
sign = -sign;
}
if (b < 0) {
b = -b;
sign = -sign;
}
double result = 0;
while (a >= 0) {
a -= b;
result++;
}
return (result - 1) * sign;
}
}
|
Add exception for division by zero
|
Add exception for division by zero
|
Java
|
mit
|
Tyriar/growing-with-the-web,Tyriar/growing-with-the-web,gwtw/growing-with-the-web,gwtw/growing-with-the-web
|
java
|
## Code Before:
package com.growingwiththeweb.algorithms.interviewquestions.dividewithoutdivide;
public class Program {
public static void main(String[] args) {
assert divide(10,4) == 2;
assert divide(11,4) == 2;
assert divide(12,4) == 3;
assert divide(13,4) == 3;
assert divide(130,3) == 43;
System.out.println("Tests passed");
}
public static double divide(double a, double b) {
int sign = 1;
if (a < 0) {
a = -a;
sign = -sign;
}
if (b < 0) {
b = -b;
sign = -sign;
}
double result = 0;
while (a >= 0) {
a -= b;
result++;
}
return (result - 1) * sign;
}
}
## Instruction:
Add exception for division by zero
## Code After:
package com.growingwiththeweb.algorithms.interviewquestions.dividewithoutdivide;
import java.lang.ArithmeticException;
public class Program {
public static void main(String[] args) {
assert divide(10, 4) == 2;
assert divide(11, 4) == 2;
assert divide(12, 4) == 3;
assert divide(13, 4) == 3;
assert divide(130,3 ) == 43;
boolean threw = false;
try {
divide(1, 0);
} catch (ArithmeticException ex) {
threw = true;
}
assert threw;
System.out.println("Tests passed");
}
public static double divide(double a, double b) {
if (b == 0) {
throw new ArithmeticException(
"Division by 0 is undefined: " + a + "/" + b);
}
int sign = 1;
if (a < 0) {
a = -a;
sign = -sign;
}
if (b < 0) {
b = -b;
sign = -sign;
}
double result = 0;
while (a >= 0) {
a -= b;
result++;
}
return (result - 1) * sign;
}
}
|
// ... existing code ...
package com.growingwiththeweb.algorithms.interviewquestions.dividewithoutdivide;
import java.lang.ArithmeticException;
public class Program {
public static void main(String[] args) {
assert divide(10, 4) == 2;
assert divide(11, 4) == 2;
assert divide(12, 4) == 3;
assert divide(13, 4) == 3;
assert divide(130,3 ) == 43;
boolean threw = false;
try {
divide(1, 0);
} catch (ArithmeticException ex) {
threw = true;
}
assert threw;
System.out.println("Tests passed");
}
public static double divide(double a, double b) {
if (b == 0) {
throw new ArithmeticException(
"Division by 0 is undefined: " + a + "/" + b);
}
int sign = 1;
if (a < 0) {
a = -a;
// ... rest of the code ...
|
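A note on the algorithm above: it divides by repeated subtraction, normalizing both operands to be non-negative and restoring the sign at the end; because the Java loop keeps subtracting until a turns negative, it overshoots by one iteration and compensates with result - 1. The cost is on the order of a/b subtractions, so it only suits small operands. An equivalent sketch in Python (not part of the original repository), written so the loop stops before overshooting:
def divide(a, b):
    """Integer division via repeated subtraction, without using / or //."""
    if b == 0:
        raise ZeroDivisionError("Division by 0 is undefined: %r/%r" % (a, b))
    sign = 1
    if a < 0:
        a, sign = -a, -sign
    if b < 0:
        b, sign = -b, -sign
    result = 0
    while a >= b:        # stopping at a < b removes the need for a result - 1 fixup
        a -= b
        result += 1
    return result * sign
assert divide(13, 4) == 3
assert divide(130, 3) == 43
assert divide(-10, 4) == -2  # truncates toward zero, matching the Java version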
0460404bb7f3e9a9f6ece1c4a141b16fced6f741
|
tests/test_chunked_http.py
|
tests/test_chunked_http.py
|
from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
Use the disco.compat.http_server to work with python3.
|
Use the disco.compat.http_server to work with python3.
|
Python
|
bsd-3-clause
|
pombredanne/disco,simudream/disco,ErikDubbelboer/disco,beni55/disco,discoproject/disco,ErikDubbelboer/disco,oldmantaiter/disco,simudream/disco,oldmantaiter/disco,seabirdzh/disco,seabirdzh/disco,seabirdzh/disco,discoproject/disco,ktkt2009/disco,discoproject/disco,ktkt2009/disco,beni55/disco,ErikDubbelboer/disco,pombredanne/disco,mozilla/disco,mozilla/disco,discoproject/disco,beni55/disco,mwilliams3/disco,mwilliams3/disco,pombredanne/disco,simudream/disco,pombredanne/disco,seabirdzh/disco,ErikDubbelboer/disco,ktkt2009/disco,beni55/disco,ErikDubbelboer/disco,ktkt2009/disco,seabirdzh/disco,beni55/disco,pooya/disco,mozilla/disco,mozilla/disco,pooya/disco,ktkt2009/disco,oldmantaiter/disco,discoproject/disco,pooya/disco,simudream/disco,oldmantaiter/disco,pombredanne/disco,oldmantaiter/disco,mwilliams3/disco,mwilliams3/disco,simudream/disco,pooya/disco,mwilliams3/disco
|
python
|
## Code Before:
from disco.test import TestCase, TestJob
from disco.core import Job
import disco
import threading
import BaseHTTPServer
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
## Instruction:
Use the disco.compat.http_server to work with python3.
## Code After:
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
for word in line.split():
yield word, 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.send_header("Transfer-Encoding", "chunked")
s.end_headers()
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
class RawTestCase(TestCase):
def runTest(self):
threading.Thread(target=startServer).start()
input = 'http:' + self.disco.master.split(':')[1] + ":" + str(PORT)
self.job = Job().run(input=[input], map=map, reduce=reduce)
self.assertEqual(sorted(self.results(self.job)), [('Hello', 1), ('World', 1)])
|
# ... existing code ...
from disco.test import TestCase, TestJob
from disco.core import Job
from disco.compat import http_server
import disco
import threading
def map(line, params):
# ... modified code ...
PORT = 1234
class MyHandler(http_server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
...
s.wfile.write("b\r\nHello World\r\n0\r\n\r\n")
def startServer():
server_class = http_server.HTTPServer
httpd = server_class(('', PORT), MyHandler)
httpd.handle_request()
httpd.server_close()
# ... rest of the code ...
|
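The test server above hand-writes an HTTP/1.1 chunked body, which is worth decoding: each chunk is its size in hexadecimal, a CRLF, the chunk bytes, another CRLF, and the body ends with a zero-length chunk; "b" is hex for 11, the length of "Hello World". A small helper that produces the same framing (illustrative only, not from the disco codebase):
def chunked(*chunks):
    """Frame byte chunks using HTTP/1.1 chunked transfer encoding."""
    body = b""
    for chunk in chunks:
        body += b"%x\r\n" % len(chunk)  # chunk size in hex
        body += chunk + b"\r\n"         # chunk payload
    return body + b"0\r\n\r\n"          # terminating zero-length chunk
assert chunked(b"Hello World") == b"b\r\nHello World\r\n0\r\n\r\n"
Note that b"%x" % n needs Python 3.5+; on older interpreters the size line can be built with format(len(chunk), 'x').encode() instead.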
7f212a9bacfce6612c6ec435174bf9c3eddd4652
|
pagoeta/apps/events/serializers.py
|
pagoeta/apps/events/serializers.py
|
from hvad.contrib.restframework import TranslatableModelSerializer
from rest_framework import serializers
from rest_framework.reverse import reverse
from .models import Category, TargetGroup, TargetAge, Event
from pagoeta.apps.core.functions import get_absolute_uri
from pagoeta.apps.places.serializers import PlaceListSerializer
class TypeField(serializers.RelatedField):
def to_representation(self, value):
return {
'code': value.code,
'name': value.name
}
class EventSerializer(TranslatableModelSerializer):
category = TypeField(read_only=True)
target_group = TypeField(read_only=True)
targetGroup = target_group
target_age = TypeField(read_only=True)
targetAge = target_age
place = PlaceListSerializer(read_only=True)
# camelCase some field names
startAt = serializers.DateTimeField(source='start_at', read_only=True)
endAt = serializers.DateTimeField(source='end_at', read_only=True)
isFeatured = serializers.BooleanField(source='is_featured', read_only=True)
isVisible = serializers.BooleanField(source='is_visible', read_only=True)
href = serializers.SerializerMethodField()
class Meta(object):
model = Event
exclude = ('start_at', 'end_at', 'is_featured', 'is_visible', 'language_code')
def get_href(self, obj):
return get_absolute_uri(reverse('v1:event-detail', [obj.id]))
|
from hvad.contrib.restframework import TranslatableModelSerializer
from rest_framework import serializers
from rest_framework.reverse import reverse
from .models import Category, TargetGroup, TargetAge, Event
from pagoeta.apps.core.functions import get_absolute_uri
from pagoeta.apps.places.serializers import PlaceListSerializer
class TypeField(serializers.RelatedField):
def to_representation(self, value):
return {
'code': value.code,
'name': value.name
}
class EventSerializer(TranslatableModelSerializer):
category = TypeField(read_only=True)
place = PlaceListSerializer(read_only=True)
# camelCase some field names
targetGroup = TypeField(source='target_group', read_only=True)
targetAge = TypeField(source='target_age', read_only=True)
startAt = serializers.DateTimeField(source='start_at', read_only=True)
endAt = serializers.DateTimeField(source='end_at', read_only=True)
isFeatured = serializers.BooleanField(source='is_featured', read_only=True)
isVisible = serializers.BooleanField(source='is_visible', read_only=True)
href = serializers.SerializerMethodField()
class Meta(object):
model = Event
exclude = ('target_group', 'target_age', 'start_at', 'end_at', 'is_featured', 'is_visible', 'language_code')
def get_href(self, obj):
return get_absolute_uri(reverse('v1:event-detail', [obj.id]))
|
Change camelCasing strategy for `target_age` and `target_group`
|
Change camelCasing strategy for `target_age` and `target_group`
|
Python
|
mit
|
zarautz/pagoeta,zarautz/pagoeta,zarautz/pagoeta
|
python
|
## Code Before:
from hvad.contrib.restframework import TranslatableModelSerializer
from rest_framework import serializers
from rest_framework.reverse import reverse
from .models import Category, TargetGroup, TargetAge, Event
from pagoeta.apps.core.functions import get_absolute_uri
from pagoeta.apps.places.serializers import PlaceListSerializer
class TypeField(serializers.RelatedField):
def to_representation(self, value):
return {
'code': value.code,
'name': value.name
}
class EventSerializer(TranslatableModelSerializer):
category = TypeField(read_only=True)
target_group = TypeField(read_only=True)
targetGroup = target_group
target_age = TypeField(read_only=True)
targetAge = target_age
place = PlaceListSerializer(read_only=True)
# camelCase some field names
startAt = serializers.DateTimeField(source='start_at', read_only=True)
endAt = serializers.DateTimeField(source='end_at', read_only=True)
isFeatured = serializers.BooleanField(source='is_featured', read_only=True)
isVisible = serializers.BooleanField(source='is_visible', read_only=True)
href = serializers.SerializerMethodField()
class Meta(object):
model = Event
exclude = ('start_at', 'end_at', 'is_featured', 'is_visible', 'language_code')
def get_href(self, obj):
return get_absolute_uri(reverse('v1:event-detail', [obj.id]))
## Instruction:
Change camelCasing strategy for `target_age` and `target_group`
## Code After:
from hvad.contrib.restframework import TranslatableModelSerializer
from rest_framework import serializers
from rest_framework.reverse import reverse
from .models import Category, TargetGroup, TargetAge, Event
from pagoeta.apps.core.functions import get_absolute_uri
from pagoeta.apps.places.serializers import PlaceListSerializer
class TypeField(serializers.RelatedField):
def to_representation(self, value):
return {
'code': value.code,
'name': value.name
}
class EventSerializer(TranslatableModelSerializer):
category = TypeField(read_only=True)
place = PlaceListSerializer(read_only=True)
# camelCase some field names
targetGroup = TypeField(source='target_group', read_only=True)
targetAge = TypeField(source='target_age', read_only=True)
startAt = serializers.DateTimeField(source='start_at', read_only=True)
endAt = serializers.DateTimeField(source='end_at', read_only=True)
isFeatured = serializers.BooleanField(source='is_featured', read_only=True)
isVisible = serializers.BooleanField(source='is_visible', read_only=True)
href = serializers.SerializerMethodField()
class Meta(object):
model = Event
exclude = ('target_group', 'target_age', 'start_at', 'end_at', 'is_featured', 'is_visible', 'language_code')
def get_href(self, obj):
return get_absolute_uri(reverse('v1:event-detail', [obj.id]))
|
# ... existing code ...
class EventSerializer(TranslatableModelSerializer):
category = TypeField(read_only=True)
place = PlaceListSerializer(read_only=True)
# camelCase some field names
targetGroup = TypeField(source='target_group', read_only=True)
targetAge = TypeField(source='target_age', read_only=True)
startAt = serializers.DateTimeField(source='start_at', read_only=True)
endAt = serializers.DateTimeField(source='end_at', read_only=True)
isFeatured = serializers.BooleanField(source='is_featured', read_only=True)
# ... modified code ...
class Meta(object):
model = Event
exclude = ('target_group', 'target_age', 'start_at', 'end_at', 'is_featured', 'is_visible', 'language_code')
def get_href(self, obj):
return get_absolute_uri(reverse('v1:event-detail', [obj.id]))
# ... rest of the code ...
|
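The mechanism behind the change above is Django REST Framework's source argument: declare the field under the camelCase name the API should expose, point source= at the snake_case model attribute, and list the original attribute in Meta.exclude so it is not serialized twice. A minimal sketch, assuming an already-configured Django project with DRF installed (the serializer and field names here are invented, not from pagoeta):
from rest_framework import serializers
class ThingSerializer(serializers.Serializer):
    # Exposed as "startAt"/"isVisible" in JSON, read from start_at/is_visible on the instance.
    startAt = serializers.DateTimeField(source='start_at', read_only=True)
    isVisible = serializers.BooleanField(source='is_visible', read_only=True)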
0b1d786514dcd6d94913613f3823a1fd2c80c152
|
core/deployment/src/main/java/io/quarkus/deployment/SslProcessor.java
|
core/deployment/src/main/java/io/quarkus/deployment/SslProcessor.java
|
package io.quarkus.deployment;
import java.util.Optional;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.SslNativeConfigBuildItem;
import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem;
import io.quarkus.runtime.annotations.ConfigItem;
import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
public class SslProcessor {
private static final String JAVA_11_PLUS_SSL_LOGGER = "sun.security.ssl.SSLLogger";
private static final String JAVA_8_PLUS_SSL_LOGGER = "sun.security.ssl.Debug";
SslConfig ssl;
@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
static class SslConfig {
/**
* Enable native SSL support.
*/
@ConfigItem(name = "native")
Optional<Boolean> native_;
}
@BuildStep
SslNativeConfigBuildItem setupNativeSsl() {
return new SslNativeConfigBuildItem(ssl.native_);
}
@BuildStep
void runtime(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized) {
registerIfExists(reinitialized, JAVA_11_PLUS_SSL_LOGGER);
registerIfExists(reinitialized, JAVA_8_PLUS_SSL_LOGGER);
}
private void registerIfExists(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized, String className) {
try {
Class.forName(className, false, Thread.currentThread().getContextClassLoader());
reinitialized.produce(new RuntimeReinitializedClassBuildItem(className));
} catch (ClassNotFoundException ignored) {
}
}
}
|
package io.quarkus.deployment;
import java.util.Optional;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.SslNativeConfigBuildItem;
import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem;
import io.quarkus.runtime.annotations.ConfigItem;
import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
public class SslProcessor {
private static final String JAVA_11_PLUS_SSL_LOGGER = "sun.security.ssl.SSLLogger";
SslConfig ssl;
@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
static class SslConfig {
/**
* Enable native SSL support.
*/
@ConfigItem(name = "native")
Optional<Boolean> native_;
}
@BuildStep
SslNativeConfigBuildItem setupNativeSsl() {
return new SslNativeConfigBuildItem(ssl.native_);
}
@BuildStep
void runtime(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized) {
reinitialized.produce(new RuntimeReinitializedClassBuildItem(JAVA_11_PLUS_SSL_LOGGER));
}
}
|
Remove conditional code testing JDK 8
|
Remove conditional code testing JDK 8
|
Java
|
apache-2.0
|
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
|
java
|
## Code Before:
package io.quarkus.deployment;
import java.util.Optional;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.SslNativeConfigBuildItem;
import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem;
import io.quarkus.runtime.annotations.ConfigItem;
import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
public class SslProcessor {
private static final String JAVA_11_PLUS_SSL_LOGGER = "sun.security.ssl.SSLLogger";
private static final String JAVA_8_PLUS_SSL_LOGGER = "sun.security.ssl.Debug";
SslConfig ssl;
@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
static class SslConfig {
/**
* Enable native SSL support.
*/
@ConfigItem(name = "native")
Optional<Boolean> native_;
}
@BuildStep
SslNativeConfigBuildItem setupNativeSsl() {
return new SslNativeConfigBuildItem(ssl.native_);
}
@BuildStep
void runtime(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized) {
registerIfExists(reinitialized, JAVA_11_PLUS_SSL_LOGGER);
registerIfExists(reinitialized, JAVA_8_PLUS_SSL_LOGGER);
}
private void registerIfExists(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized, String className) {
try {
Class.forName(className, false, Thread.currentThread().getContextClassLoader());
reinitialized.produce(new RuntimeReinitializedClassBuildItem(className));
} catch (ClassNotFoundException ignored) {
}
}
}
## Instruction:
Remove conditional code testing JDK 8
## Code After:
package io.quarkus.deployment;
import java.util.Optional;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.SslNativeConfigBuildItem;
import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem;
import io.quarkus.runtime.annotations.ConfigItem;
import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
public class SslProcessor {
private static final String JAVA_11_PLUS_SSL_LOGGER = "sun.security.ssl.SSLLogger";
SslConfig ssl;
@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
static class SslConfig {
/**
* Enable native SSL support.
*/
@ConfigItem(name = "native")
Optional<Boolean> native_;
}
@BuildStep
SslNativeConfigBuildItem setupNativeSsl() {
return new SslNativeConfigBuildItem(ssl.native_);
}
@BuildStep
void runtime(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized) {
reinitialized.produce(new RuntimeReinitializedClassBuildItem(JAVA_11_PLUS_SSL_LOGGER));
}
}
|
# ... existing code ...
public class SslProcessor {
private static final String JAVA_11_PLUS_SSL_LOGGER = "sun.security.ssl.SSLLogger";
SslConfig ssl;
# ... modified code ...
@BuildStep
void runtime(BuildProducer<RuntimeReinitializedClassBuildItem> reinitialized) {
reinitialized.produce(new RuntimeReinitializedClassBuildItem(JAVA_11_PLUS_SSL_LOGGER));
}
}
# ... rest of the code ...
|
ffbd18870f6fa2284884e6c425e51f00efe48cc2
|
src/util/files.c
|
src/util/files.c
|
//
// Created by gravypod on 9/20/17.
//
#include "files.h"
#include <sys/stat.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
long int fsize(const char *filename)
{
struct stat st;
if (stat(filename, &st) == 0)
return st.st_size;
return -1;
}
char* read_file(const char* filename)
{
const size_t file_size = (size_t) fsize(filename);
FILE *f;
if (file_size == -1 || !(f = fopen(filename, "rb"))) {
return NULL;
}
size_t data_left = file_size;
char *buffer = malloc(file_size + 1);
char *tmp = buffer;
while (data_left > 0)
{
const size_t len = fread((void *) tmp, sizeof(char), sizeof(buffer), f);
data_left -= len;
tmp += len;
}
buffer[file_size] = 0;
fclose(f);
return buffer;
}
|
//
// Created by gravypod on 9/20/17.
//
#include "files.h"
#include <sys/stat.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
long int fsize(const char *filename)
{
struct stat st;
if (stat(filename, &st) == 0)
return st.st_size;
return -1;
}
char* read_file(const char* filename)
{
long int reported_size = fsize(filename);
FILE *f;
if (reported_size == -1 || !(f = fopen(filename, "rb"))) {
return NULL;
}
const size_t file_size = (size_t) reported_size;
size_t data_left = file_size;
char *buffer = malloc(file_size + 1);
char *tmp = buffer;
while (data_left > 0)
{
const size_t len = fread((void *) tmp, sizeof(char), sizeof(buffer), f);
data_left -= len;
tmp += len;
}
buffer[file_size] = 0;
fclose(f);
return buffer;
}
|
Make sure file stat actually finished. Don't check a size_t for neg.
|
Make sure file stat actually finished. Don't check a size_t for neg.
|
C
|
mit
|
gravypod/solid-snake,gravypod/solid-snake,gravypod/solid-snake
|
c
|
## Code Before:
//
// Created by gravypod on 9/20/17.
//
#include "files.h"
#include <sys/stat.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
long int fsize(const char *filename)
{
struct stat st;
if (stat(filename, &st) == 0)
return st.st_size;
return -1;
}
char* read_file(const char* filename)
{
const size_t file_size = (size_t) fsize(filename);
FILE *f;
if (file_size == -1 || !(f = fopen(filename, "rb"))) {
return NULL;
}
size_t data_left = file_size;
char *buffer = malloc(file_size + 1);
char *tmp = buffer;
while (data_left > 0)
{
const size_t len = fread((void *) tmp, sizeof(char), sizeof(buffer), f);
data_left -= len;
tmp += len;
}
buffer[file_size] = 0;
fclose(f);
return buffer;
}
## Instruction:
Make sure file stat actually finished. Don't check a size_t for neg.
## Code After:
//
// Created by gravypod on 9/20/17.
//
#include "files.h"
#include <sys/stat.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
long int fsize(const char *filename)
{
struct stat st;
if (stat(filename, &st) == 0)
return st.st_size;
return -1;
}
char* read_file(const char* filename)
{
long int reported_size = fsize(filename);
FILE *f;
if (reported_size == -1 || !(f = fopen(filename, "rb"))) {
return NULL;
}
const size_t file_size = (size_t) reported_size;
size_t data_left = file_size;
char *buffer = malloc(file_size + 1);
char *tmp = buffer;
while (data_left > 0)
{
const size_t len = fread((void *) tmp, sizeof(char), sizeof(buffer), f);
data_left -= len;
tmp += len;
}
buffer[file_size] = 0;
fclose(f);
return buffer;
}
|
// ... existing code ...
char* read_file(const char* filename)
{
long int reported_size = fsize(filename);
FILE *f;
if (reported_size == -1 || !(f = fopen(filename, "rb"))) {
return NULL;
}
const size_t file_size = (size_t) reported_size;
size_t data_left = file_size;
char *buffer = malloc(file_size + 1);
char *tmp = buffer;
// ... rest of the code ...
|
addc1e83911f72282eca9603e2c483ba6ef5ef7c
|
packages/xsp.py
|
packages/xsp.py
|
GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
|
GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
Update to the latest XSP.
|
Update to the latest XSP.
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild
|
python
|
## Code Before:
GitHubTarballPackage('mono', 'xsp', '2.11', 'd3e2f80ff59ddff68e757a520655555e2fbf2695', configure = './autogen.sh --prefix="%{prefix}"')
## Instruction:
Update to the latest XSP.
## Code After:
GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
|
# ... existing code ...
GitHubTarballPackage('mono', 'xsp', '3.0.11', '4587438369691b9b3e8415e1f113aa98b57d1fde', configure = './autogen.sh --prefix="%{prefix}"')
# ... rest of the code ...
|
56c0d2ea610aae35edfef2d242e0c4ca6a236a4d
|
crypto.py
|
crypto.py
|
from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
Replace file_io usage with open
|
Replace file_io usage with open
|
Python
|
unlicense
|
kvikshaug/pwkeeper
|
python
|
## Code Before:
from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
## Instruction:
Replace file_io usage with open
## Code After:
from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
...
from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
...
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
...
|
12894405c25cffe7304aeab5650b73740dae9a52
|
neuralnetwork-core/src/main/java/org/neuralnetwork/synapse/AbstractSynapse.java
|
neuralnetwork-core/src/main/java/org/neuralnetwork/synapse/AbstractSynapse.java
|
package org.neuralnetwork.synapse;
public abstract class AbstractSynapse<InputType, ValueType> {
private final InputType input;
public AbstractSynapse(InputType input) {
this.input = input;
}
abstract protected ValueType readValueFrom(InputType input);
public ValueType getValue() {
return readValueFrom(input);
}
}
|
package org.neuralnetwork.synapse;
public abstract class AbstractSynapse<InputType, ValueType> {
private final InputType input;
public AbstractSynapse(InputType input) {
this.input = input;
}
abstract protected ValueType readValueFrom(InputType input);
public ValueType getValue() {
return readValueFrom(input);
}
public InputType getInput() {
return input;
}
}
|
Add a getter for the synapses input.
|
Add a getter for the synapses input.
|
Java
|
bsd-3-clause
|
matthieu-vergne/Neural-Network
|
java
|
## Code Before:
package org.neuralnetwork.synapse;
public abstract class AbstractSynapse<InputType, ValueType> {
private final InputType input;
public AbstractSynapse(InputType input) {
this.input = input;
}
abstract protected ValueType readValueFrom(InputType input);
public ValueType getValue() {
return readValueFrom(input);
}
}
## Instruction:
Add a getter for the synapses input.
## Code After:
package org.neuralnetwork.synapse;
public abstract class AbstractSynapse<InputType, ValueType> {
private final InputType input;
public AbstractSynapse(InputType input) {
this.input = input;
}
abstract protected ValueType readValueFrom(InputType input);
public ValueType getValue() {
return readValueFrom(input);
}
public InputType getInput() {
return input;
}
}
|
// ... existing code ...
public ValueType getValue() {
return readValueFrom(input);
}
public InputType getInput() {
return input;
}
}
// ... rest of the code ...
|
3be9f4cc84d479a16284bcabb29b681db215bd22
|
src/main/java/org/cyclops/integratedtunnels/core/ItemStackPredicate.java
|
src/main/java/org/cyclops/integratedtunnels/core/ItemStackPredicate.java
|
package org.cyclops.integratedtunnels.core;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.function.Predicate;
/**
* A predicate for matching ItemStacks.
* @author rubensworks
*/
public abstract class ItemStackPredicate implements Predicate<ItemStack> {
private final ItemStack itemStack;
private final int matchFlags;
private final boolean blacklist;
public ItemStackPredicate(@Nonnull ItemStack itemStack, int matchFlags, boolean blacklist) {
this.itemStack = itemStack;
this.matchFlags = matchFlags;
this.blacklist = blacklist;
}
public ItemStackPredicate(boolean blacklist) {
this(ItemStack.EMPTY, -1, blacklist);
}
@Nonnull
public ItemStack getItemStack() {
return itemStack;
}
public int getMatchFlags() {
return matchFlags;
}
public boolean hasMatchFlags() {
return getMatchFlags() >= 0;
}
public boolean isBlacklist() {
return blacklist;
}
}
|
package org.cyclops.integratedtunnels.core;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.function.Predicate;
/**
* A predicate for matching ItemStacks.
* @author rubensworks
*/
public abstract class ItemStackPredicate implements Predicate<ItemStack> {
private final ItemStack itemStack;
private final int matchFlags;
private final boolean blacklist;
public ItemStackPredicate(@Nonnull ItemStack itemStack, int matchFlags, boolean blacklist) {
this.itemStack = itemStack;
this.matchFlags = matchFlags;
this.blacklist = blacklist;
}
public ItemStackPredicate(boolean blacklist) {
this(ItemStack.EMPTY, -1, blacklist);
}
@Nonnull
public ItemStack getItemStack() {
return itemStack;
}
public int getMatchFlags() {
return matchFlags;
}
public boolean hasMatchFlags() {
return getMatchFlags() >= 0 && !getItemStack().isEmpty();
}
public boolean isBlacklist() {
return blacklist;
}
}
|
Fix item transfer happening on all items when an empty filter was given
|
Fix item transfer happening on all items when an empty filter was given
Closes #121
|
Java
|
mit
|
CyclopsMC/IntegratedTunnels
|
java
|
## Code Before:
package org.cyclops.integratedtunnels.core;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.function.Predicate;
/**
* A predicate for matching ItemStacks.
* @author rubensworks
*/
public abstract class ItemStackPredicate implements Predicate<ItemStack> {
private final ItemStack itemStack;
private final int matchFlags;
private final boolean blacklist;
public ItemStackPredicate(@Nonnull ItemStack itemStack, int matchFlags, boolean blacklist) {
this.itemStack = itemStack;
this.matchFlags = matchFlags;
this.blacklist = blacklist;
}
public ItemStackPredicate(boolean blacklist) {
this(ItemStack.EMPTY, -1, blacklist);
}
@Nonnull
public ItemStack getItemStack() {
return itemStack;
}
public int getMatchFlags() {
return matchFlags;
}
public boolean hasMatchFlags() {
return getMatchFlags() >= 0;
}
public boolean isBlacklist() {
return blacklist;
}
}
## Instruction:
Fix item transfer happening on all items when an empty filter was given
Closes #121
## Code After:
package org.cyclops.integratedtunnels.core;
import net.minecraft.item.ItemStack;
import javax.annotation.Nonnull;
import java.util.function.Predicate;
/**
* A predicate for matching ItemStacks.
* @author rubensworks
*/
public abstract class ItemStackPredicate implements Predicate<ItemStack> {
private final ItemStack itemStack;
private final int matchFlags;
private final boolean blacklist;
public ItemStackPredicate(@Nonnull ItemStack itemStack, int matchFlags, boolean blacklist) {
this.itemStack = itemStack;
this.matchFlags = matchFlags;
this.blacklist = blacklist;
}
public ItemStackPredicate(boolean blacklist) {
this(ItemStack.EMPTY, -1, blacklist);
}
@Nonnull
public ItemStack getItemStack() {
return itemStack;
}
public int getMatchFlags() {
return matchFlags;
}
public boolean hasMatchFlags() {
return getMatchFlags() >= 0 && !getItemStack().isEmpty();
}
public boolean isBlacklist() {
return blacklist;
}
}
|
...
}
public boolean hasMatchFlags() {
return getMatchFlags() >= 0 && !getItemStack().isEmpty();
}
public boolean isBlacklist() {
...
|
f410b51f850d2fb75de16d9de4e95be5eb7a4e07
|
python/peacock/utils/TextSubWindow.py
|
python/peacock/utils/TextSubWindow.py
|
from PyQt5 import QtCore, QtWidgets
class TextSubWindow(QtWidgets.QTextEdit):
"""
TextEdit that saves it size when it closes and closes itself if the main widget disappears.
"""
def __init__(self):
super(TextSubWindow, self).__init__()
self.setWindowFlags(QtCore.Qt.SubWindow)
self._size = None
def sizeHint(self, *args):
"""
Return the saved size.
"""
if self._size:
return self._size
else:
return super(TextSubWindow, self).size()
def closeEvent(self, *args):
"""
Store the size of the window.
"""
self._size = self.size()
super(TextSubWindow, self).closeEvent(*args)
|
from PyQt5 import QtWidgets
class TextSubWindow(QtWidgets.QTextEdit):
"""
TextEdit that saves it size when it closes and closes itself if the main widget disappears.
"""
def __init__(self):
super(TextSubWindow, self).__init__()
self._size = None
def sizeHint(self, *args):
"""
Return the saved size.
"""
if self._size:
return self._size
else:
return super(TextSubWindow, self).size()
def closeEvent(self, *args):
"""
Store the size of the window.
"""
self._size = self.size()
super(TextSubWindow, self).closeEvent(*args)
|
Fix problem with copying from text window.
|
Fix problem with copying from text window.
closes #9843
|
Python
|
lgpl-2.1
|
harterj/moose,jessecarterMOOSE/moose,yipenggao/moose,yipenggao/moose,milljm/moose,harterj/moose,laagesen/moose,Chuban/moose,andrsd/moose,permcody/moose,dschwen/moose,milljm/moose,laagesen/moose,permcody/moose,laagesen/moose,nuclear-wizard/moose,jessecarterMOOSE/moose,bwspenc/moose,dschwen/moose,sapitts/moose,yipenggao/moose,bwspenc/moose,friedmud/moose,YaqiWang/moose,YaqiWang/moose,milljm/moose,nuclear-wizard/moose,Chuban/moose,jessecarterMOOSE/moose,laagesen/moose,andrsd/moose,harterj/moose,lindsayad/moose,bwspenc/moose,friedmud/moose,lindsayad/moose,friedmud/moose,permcody/moose,dschwen/moose,bwspenc/moose,nuclear-wizard/moose,jessecarterMOOSE/moose,lindsayad/moose,SudiptaBiswas/moose,idaholab/moose,harterj/moose,yipenggao/moose,lindsayad/moose,jessecarterMOOSE/moose,dschwen/moose,bwspenc/moose,idaholab/moose,milljm/moose,Chuban/moose,sapitts/moose,idaholab/moose,laagesen/moose,milljm/moose,YaqiWang/moose,andrsd/moose,YaqiWang/moose,dschwen/moose,lindsayad/moose,sapitts/moose,Chuban/moose,SudiptaBiswas/moose,harterj/moose,idaholab/moose,andrsd/moose,nuclear-wizard/moose,sapitts/moose,andrsd/moose,SudiptaBiswas/moose,friedmud/moose,SudiptaBiswas/moose,SudiptaBiswas/moose,idaholab/moose,sapitts/moose,permcody/moose
|
python
|
## Code Before:
from PyQt5 import QtCore, QtWidgets
class TextSubWindow(QtWidgets.QTextEdit):
"""
TextEdit that saves it size when it closes and closes itself if the main widget disappears.
"""
def __init__(self):
super(TextSubWindow, self).__init__()
self.setWindowFlags(QtCore.Qt.SubWindow)
self._size = None
def sizeHint(self, *args):
"""
Return the saved size.
"""
if self._size:
return self._size
else:
return super(TextSubWindow, self).size()
def closeEvent(self, *args):
"""
Store the size of the window.
"""
self._size = self.size()
super(TextSubWindow, self).closeEvent(*args)
## Instruction:
Fix problem with copying from text window.
closes #9843
## Code After:
from PyQt5 import QtWidgets
class TextSubWindow(QtWidgets.QTextEdit):
"""
TextEdit that saves it size when it closes and closes itself if the main widget disappears.
"""
def __init__(self):
super(TextSubWindow, self).__init__()
self._size = None
def sizeHint(self, *args):
"""
Return the saved size.
"""
if self._size:
return self._size
else:
return super(TextSubWindow, self).size()
def closeEvent(self, *args):
"""
Store the size of the window.
"""
self._size = self.size()
super(TextSubWindow, self).closeEvent(*args)
|
// ... existing code ...
from PyQt5 import QtWidgets
class TextSubWindow(QtWidgets.QTextEdit):
"""
TextEdit that saves it size when it closes and closes itself if the main widget disappears.
// ... modified code ...
"""
def __init__(self):
super(TextSubWindow, self).__init__()
self._size = None
def sizeHint(self, *args):
// ... rest of the code ...
|
34977275dc0502896846e937097d18d31103bcb0
|
tests/conftest.py
|
tests/conftest.py
|
"""Global test configuration"""
import os
from pathlib import Path
import betamax
import pytest
from mccurse import curse
# Ensure cassete dir
CASSETE_DIR = 'tests/cassetes/'
if not os.path.exists(CASSETE_DIR):
os.makedirs(CASSETE_DIR)
record_mode = 'none' if os.environ.get('TRAVIS_BUILD') else 'once'
with betamax.Betamax.configure() as config:
config.cassette_library_dir = CASSETE_DIR
config.default_cassette_options.update({
'record_mode': record_mode,
'preserve_exact_body_bytes': True,
})
# Shared fixtures
@pytest.fixture
def file_database(tmpdir) -> curse.Database:
"""Database potentially located in temp dir."""
return curse.Database('test', Path(str(tmpdir)))
|
"""Global test configuration"""
import os
from pathlib import Path
import betamax
import pytest
from mccurse import addon, curse
# Ensure cassete dir
CASSETE_DIR = 'tests/cassetes/'
if not os.path.exists(CASSETE_DIR):
os.makedirs(CASSETE_DIR)
record_mode = 'none' if os.environ.get('TRAVIS_BUILD') else 'once'
with betamax.Betamax.configure() as config:
config.cassette_library_dir = CASSETE_DIR
config.default_cassette_options.update({
'record_mode': record_mode,
'preserve_exact_body_bytes': True,
})
# Shared fixtures
@pytest.fixture
def file_database(tmpdir) -> curse.Database:
"""Database potentially located in temp dir."""
return curse.Database('test', Path(str(tmpdir)))
@pytest.fixture
def tinkers_construct() -> addon.Mod:
"""Tinkers Construct project data"""
data = {
'name': 'Tinkers Construct',
'id': 74072,
'summary': 'Modify all the things, then do it again!',
}
return addon.Mod(**data)
@pytest.fixture
def minecraft() -> curse.Game:
"""Minecraft version for testing."""
data = {
'name': 'Minecraft',
'id': 432,
'version': '1.10.2',
}
return curse.Game(**data)
|
Add shared fixtures for Mod and Game
|
Add shared fixtures for Mod and Game
|
Python
|
agpl-3.0
|
khardix/mccurse
|
python
|
## Code Before:
"""Global test configuration"""
import os
from pathlib import Path
import betamax
import pytest
from mccurse import curse
# Ensure cassete dir
CASSETE_DIR = 'tests/cassetes/'
if not os.path.exists(CASSETE_DIR):
os.makedirs(CASSETE_DIR)
record_mode = 'none' if os.environ.get('TRAVIS_BUILD') else 'once'
with betamax.Betamax.configure() as config:
config.cassette_library_dir = CASSETE_DIR
config.default_cassette_options.update({
'record_mode': record_mode,
'preserve_exact_body_bytes': True,
})
# Shared fixtures
@pytest.fixture
def file_database(tmpdir) -> curse.Database:
"""Database potentially located in temp dir."""
return curse.Database('test', Path(str(tmpdir)))
## Instruction:
Add shared fixtures for Mod and Game
## Code After:
"""Global test configuration"""
import os
from pathlib import Path
import betamax
import pytest
from mccurse import addon, curse
# Ensure cassete dir
CASSETE_DIR = 'tests/cassetes/'
if not os.path.exists(CASSETE_DIR):
os.makedirs(CASSETE_DIR)
record_mode = 'none' if os.environ.get('TRAVIS_BUILD') else 'once'
with betamax.Betamax.configure() as config:
config.cassette_library_dir = CASSETE_DIR
config.default_cassette_options.update({
'record_mode': record_mode,
'preserve_exact_body_bytes': True,
})
# Shared fixtures
@pytest.fixture
def file_database(tmpdir) -> curse.Database:
"""Database potentially located in temp dir."""
return curse.Database('test', Path(str(tmpdir)))
@pytest.fixture
def tinkers_construct() -> addon.Mod:
"""Tinkers Construct project data"""
data = {
'name': 'Tinkers Construct',
'id': 74072,
'summary': 'Modify all the things, then do it again!',
}
return addon.Mod(**data)
@pytest.fixture
def minecraft() -> curse.Game:
"""Minecraft version for testing."""
data = {
'name': 'Minecraft',
'id': 432,
'version': '1.10.2',
}
return curse.Game(**data)
|
// ... existing code ...
import betamax
import pytest
from mccurse import addon, curse
# Ensure cassete dir
// ... modified code ...
"""Database potentially located in temp dir."""
return curse.Database('test', Path(str(tmpdir)))
@pytest.fixture
def tinkers_construct() -> addon.Mod:
"""Tinkers Construct project data"""
data = {
'name': 'Tinkers Construct',
'id': 74072,
'summary': 'Modify all the things, then do it again!',
}
return addon.Mod(**data)
@pytest.fixture
def minecraft() -> curse.Game:
"""Minecraft version for testing."""
data = {
'name': 'Minecraft',
'id': 432,
'version': '1.10.2',
}
return curse.Game(**data)
// ... rest of the code ...
|
c6d4a0e34a0e1ef1ea330734477aac434322ff01
|
extensions/ExtGameController.py
|
extensions/ExtGameController.py
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
Update extensions and GameController subclass
|
Update extensions and GameController subclass
|
Python
|
apache-2.0
|
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
|
python
|
## Code Before:
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
## Instruction:
Update extensions and GameController subclass
## Code After:
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
...
]
def __init__(self, game_modes=None, mode=None, game_json=None):
if game_modes is not None and not isinstance(game_modes, list):
raise TypeError("ExtGameController expected a list of GameMode objects")
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
...
|
f367d3122084c85e11efeb20d560a856e9f24d0e
|
zuice/django.py
|
zuice/django.py
|
django = __import__("django.conf.urls.defaults", {})
from zuice import Injector
def _view_builder(bindings):
def view(request, view_class, **kwargs):
view_injector = Injector(bindings)
view = view_injector.get_from_type(view_class)
bindings_for_response = bindings.copy()
bindings_for_response.bind('request').to_instance(request)
for item in kwargs.iteritems():
bindings_for_response.bind_name(item[0]).to_instance(item[1])
response_injector = Injector(bindings_for_response)
response = response_injector.call(view.respond)
return response.render(request)
return view
def url_to_class_builder(bindings):
def url_to_class(regex, view_class, kwargs=None, name=None):
if kwargs is None:
kwargs = {}
kwargs['view_class'] = view_class
return django.conf.urls.defaults.url(regex, _view_builder(bindings), kwargs, name=name)
return url_to_class
|
django = __import__("django.conf.urls.defaults", {})
from zuice import Injector
def _view_builder(bindings):
def view(request, view_class, **kwargs):
view_injector = Injector(bindings)
view = view_injector.get_from_type(view_class)
bindings_for_response = bindings.copy()
bindings_for_response.bind('request').to_instance(request)
for item in kwargs.iteritems():
bindings_for_response.bind_name(item[0]).to_instance(item[1])
response_injector = Injector(bindings_for_response)
response = response_injector.call(view.respond)
return response.render(request)
return view
def url_to_class_builder(bindings):
view = _view_builder(bindings.copy())
def url_to_class(regex, view_class, kwargs=None, name=None):
if kwargs is None:
kwargs = {}
kwargs['view_class'] = view_class
return django.conf.urls.defaults.url(regex, view, kwargs, name=name)
return url_to_class
|
Refactor url_to_class_builder so that the view is only built once
|
Refactor url_to_class_builder so that the view is only built once
|
Python
|
bsd-2-clause
|
mwilliamson/zuice
|
python
|
## Code Before:
django = __import__("django.conf.urls.defaults", {})
from zuice import Injector
def _view_builder(bindings):
def view(request, view_class, **kwargs):
view_injector = Injector(bindings)
view = view_injector.get_from_type(view_class)
bindings_for_response = bindings.copy()
bindings_for_response.bind('request').to_instance(request)
for item in kwargs.iteritems():
bindings_for_response.bind_name(item[0]).to_instance(item[1])
response_injector = Injector(bindings_for_response)
response = response_injector.call(view.respond)
return response.render(request)
return view
def url_to_class_builder(bindings):
def url_to_class(regex, view_class, kwargs=None, name=None):
if kwargs is None:
kwargs = {}
kwargs['view_class'] = view_class
return django.conf.urls.defaults.url(regex, _view_builder(bindings), kwargs, name=name)
return url_to_class
## Instruction:
Refactor url_to_class_builder so that the view is only built once
## Code After:
django = __import__("django.conf.urls.defaults", {})
from zuice import Injector
def _view_builder(bindings):
def view(request, view_class, **kwargs):
view_injector = Injector(bindings)
view = view_injector.get_from_type(view_class)
bindings_for_response = bindings.copy()
bindings_for_response.bind('request').to_instance(request)
for item in kwargs.iteritems():
bindings_for_response.bind_name(item[0]).to_instance(item[1])
response_injector = Injector(bindings_for_response)
response = response_injector.call(view.respond)
return response.render(request)
return view
def url_to_class_builder(bindings):
view = _view_builder(bindings.copy())
def url_to_class(regex, view_class, kwargs=None, name=None):
if kwargs is None:
kwargs = {}
kwargs['view_class'] = view_class
return django.conf.urls.defaults.url(regex, view, kwargs, name=name)
return url_to_class
|
# ... existing code ...
return view
def url_to_class_builder(bindings):
view = _view_builder(bindings.copy())
def url_to_class(regex, view_class, kwargs=None, name=None):
if kwargs is None:
kwargs = {}
kwargs['view_class'] = view_class
return django.conf.urls.defaults.url(regex, view, kwargs, name=name)
return url_to_class
# ... rest of the code ...
|
ffadde617db8ac3d0d5362b4a521dd4e9839710f
|
order/order_2_login_system_by_https.py
|
order/order_2_login_system_by_https.py
|
import json
import requests
""" Order 2: Login system by https
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict playload must be use json.dumps() to turn to str.
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
import json
import requests
""" Order 2: Login system by https
This is the code which use curl to login system
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict playload must be use json.dumps() to turn to str.
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
Order 2: Login system by https
|
[Order] Order 2: Login system by https
|
Python
|
mit
|
flyingSprite/spinelle
|
python
|
## Code Before:
import json
import requests
""" Order 2: Login system by https
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict playload must be use json.dumps() to turn to str.
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
## Instruction:
[Order] Order 2: Login system by https
## Code After:
import json
import requests
""" Order 2: Login system by https
This is the code which use curl to login system
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict playload must be use json.dumps() to turn to str.
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
# ... existing code ...
""" Order 2: Login system by https
This is the code which use curl to login system
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
# ... rest of the code ...
|
8da70953568e44c46d7aebeea3147c029135a824
|
MRHexKeyboard.h
|
MRHexKeyboard.h
|
//
// MRHexKeyboard.h
//
// Created by Mikk Rätsep on 02/10/13.
// Copyright (c) 2013 Mikk Rätsep. All rights reserved.
//
@import UIKit;
@interface MRHexKeyboard : UIView <UITextFieldDelegate>
@property(nonatomic, assign) CGFloat height;
@property(nonatomic, assign) BOOL display0xButton;
@property(nonatomic, assign) BOOL add0x;
@end
|
//
// MRHexKeyboard.h
//
// Created by Mikk Rätsep on 02/10/13.
// Copyright (c) 2013 Mikk Rätsep. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface MRHexKeyboard : UIView <UITextFieldDelegate>
@property(nonatomic, assign) CGFloat height;
@property(nonatomic, assign) BOOL display0xButton;
@property(nonatomic, assign) BOOL add0x;
@end
|
Switch to importing framework header
|
Switch to importing framework header
Some Apps may not use modules
|
C
|
mit
|
doofyus/HexKeyboard
|
c
|
## Code Before:
//
// MRHexKeyboard.h
//
// Created by Mikk Rätsep on 02/10/13.
// Copyright (c) 2013 Mikk Rätsep. All rights reserved.
//
@import UIKit;
@interface MRHexKeyboard : UIView <UITextFieldDelegate>
@property(nonatomic, assign) CGFloat height;
@property(nonatomic, assign) BOOL display0xButton;
@property(nonatomic, assign) BOOL add0x;
@end
## Instruction:
Switch to importing framework header
Some Apps may not use modules
## Code After:
//
// MRHexKeyboard.h
//
// Created by Mikk Rätsep on 02/10/13.
// Copyright (c) 2013 Mikk Rätsep. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface MRHexKeyboard : UIView <UITextFieldDelegate>
@property(nonatomic, assign) CGFloat height;
@property(nonatomic, assign) BOOL display0xButton;
@property(nonatomic, assign) BOOL add0x;
@end
|
# ... existing code ...
// Copyright (c) 2013 Mikk Rätsep. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface MRHexKeyboard : UIView <UITextFieldDelegate>
# ... rest of the code ...
|
6e71b0de777bf516d376397961ec232ec39ea195
|
setup.py
|
setup.py
|
from setuptools import setup
try:
from pypandoc import convert
read_md = lambda f: convert(f, 'rst')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
setup(name='centerline',
version='0.1',
description='Calculate the centerline of a polygon',
long_description=read_md('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS'
],
url='https://github.com/fitodic/centerline.git',
author='Filip Todic',
author_email='[email protected]',
license='MIT',
packages=['centerline'],
install_requires=['numpy', 'scipy', 'Shapely', 'GDAL', 'click', 'cligj', 'six', 'Fiona'],
scripts=['bin/shp2centerline'],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
try:
from pypandoc import convert
def read_md():
return lambda f: convert(f, 'rst')
except ImportError:
print(
"warning: pypandoc module not found, could not convert Markdown to RST"
)
def read_md():
return lambda f: open(f, 'r').read()
setup(name='centerline',
version='0.1',
description='Calculate the centerline of a polygon',
long_description=read_md('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS'
],
url='https://github.com/fitodic/centerline.git',
author='Filip Todic',
author_email='[email protected]',
license='MIT',
packages=['centerline'],
install_requires=['numpy', 'scipy', 'Shapely', 'GDAL', 'click', 'cligj', 'six', 'Fiona'],
scripts=['bin/shp2centerline'],
include_package_data=True,
zip_safe=False)
|
Define a MD->RST conversion function
|
Define a MD->RST conversion function
|
Python
|
mit
|
fitodic/centerline,fitodic/polygon-centerline,fitodic/centerline
|
python
|
## Code Before:
from setuptools import setup
try:
from pypandoc import convert
read_md = lambda f: convert(f, 'rst')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
setup(name='centerline',
version='0.1',
description='Calculate the centerline of a polygon',
long_description=read_md('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS'
],
url='https://github.com/fitodic/centerline.git',
author='Filip Todic',
author_email='[email protected]',
license='MIT',
packages=['centerline'],
install_requires=['numpy', 'scipy', 'Shapely', 'GDAL', 'click', 'cligj', 'six', 'Fiona'],
scripts=['bin/shp2centerline'],
include_package_data=True,
zip_safe=False)
## Instruction:
Define a MD->RST conversion function
## Code After:
from setuptools import setup
try:
from pypandoc import convert
def read_md():
return lambda f: convert(f, 'rst')
except ImportError:
print(
"warning: pypandoc module not found, could not convert Markdown to RST"
)
def read_md():
return lambda f: open(f, 'r').read()
setup(name='centerline',
version='0.1',
description='Calculate the centerline of a polygon',
long_description=read_md('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS'
],
url='https://github.com/fitodic/centerline.git',
author='Filip Todic',
author_email='[email protected]',
license='MIT',
packages=['centerline'],
install_requires=['numpy', 'scipy', 'Shapely', 'GDAL', 'click', 'cligj', 'six', 'Fiona'],
scripts=['bin/shp2centerline'],
include_package_data=True,
zip_safe=False)
|
# ... existing code ...
try:
from pypandoc import convert
def read_md():
return lambda f: convert(f, 'rst')
except ImportError:
print(
"warning: pypandoc module not found, could not convert Markdown to RST"
)
def read_md():
return lambda f: open(f, 'r').read()
setup(name='centerline',
version='0.1',
# ... rest of the code ...
|
0232afac110e2cf9f841e861bd9622bcaf79616a
|
tensorbayes/distributions.py
|
tensorbayes/distributions.py
|
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
|
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
if eps > 0.0:
qv = tf.add(qv, eps, name='clipped_var1')
pv = tf.add(qv, eps, name='clipped_var2')
return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
tf.square(qm - pm) / pv - 1, axis=-1)
|
Add tf implementation of KL between normals
|
Add tf implementation of KL between normals
|
Python
|
mit
|
RuiShu/tensorbayes
|
python
|
## Code Before:
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
## Instruction:
Add tf implementation of KL between normals
## Code After:
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
return log_bernoulli_with_logits(x, logits, eps, axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
if eps > 0.0:
max_val = np.log(1.0 - eps) - np.log(eps)
logits = tf.clip_by_value(logits, -max_val, max_val,
name='clipped_logit')
return -tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
if eps > 0.0:
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
if eps > 0.0:
qv = tf.add(qv, eps, name='clipped_var1')
pv = tf.add(qv, eps, name='clipped_var2')
return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
tf.square(qm - pm) / pv - 1, axis=-1)
|
// ... existing code ...
var = tf.add(var, eps, name='clipped_var')
return -0.5 * tf.reduce_sum(
tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
if eps > 0.0:
qv = tf.add(qv, eps, name='clipped_var1')
pv = tf.add(qv, eps, name='clipped_var2')
return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
tf.square(qm - pm) / pv - 1, axis=-1)
// ... rest of the code ...
|
b25164e69d255beae1a76a9e1f7168a436a81f38
|
tests/test_utils.py
|
tests/test_utils.py
|
import helper
from rock import utils
class UtilsTestCase(helper.unittest.TestCase):
def test_shell(self):
utils.Shell.run = lambda self: self
s = utils.Shell()
self.assertTrue(isinstance(s.__enter__(), utils.Shell))
s.write('ok')
s.__exit__(None, None, None)
self.assertEqual(s.stdin.getvalue(), 'ok\n')
def execl(*args):
self.assertEqual(len(args), 4)
self.assertEqual(args[0], '/bin/bash')
self.assertEqual(args[1], '-l')
self.assertEqual(args[2], '-c')
self.assertEqual(args[3], 'ok\n')
utils.os.execl = execl
s.__exit__('type', 'value', 'tracebook')
|
import helper
from rock import utils
from rock.exceptions import ConfigError
class UtilsTestCase(helper.unittest.TestCase):
def test_shell(self):
utils.Shell.run = lambda self: self
s = utils.Shell()
self.assertTrue(isinstance(s.__enter__(), utils.Shell))
s.write('ok')
s.__exit__(None, None, None)
self.assertEqual(s.stdin.getvalue(), 'ok\n')
def execl(*args):
self.assertEqual(len(args), 4)
self.assertEqual(args[0], '/bin/bash')
self.assertEqual(args[1], '-l')
self.assertEqual(args[2], '-c')
self.assertEqual(args[3], 'ok\n')
utils.os.execl = execl
s.__exit__('type', 'value', 'tracebook')
def test_noshell(self):
utils.ROCK_SHELL = '/tmp/hopefully-no-exists'
s = utils.Shell()
s.__enter__()
self.assertRaises(ConfigError, s.__exit__, 'type', 'value', 'tracebook')
|
Test isexecutable check in utils.Shell
|
Test isexecutable check in utils.Shell
|
Python
|
mit
|
silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock
|
python
|
## Code Before:
import helper
from rock import utils
class UtilsTestCase(helper.unittest.TestCase):
def test_shell(self):
utils.Shell.run = lambda self: self
s = utils.Shell()
self.assertTrue(isinstance(s.__enter__(), utils.Shell))
s.write('ok')
s.__exit__(None, None, None)
self.assertEqual(s.stdin.getvalue(), 'ok\n')
def execl(*args):
self.assertEqual(len(args), 4)
self.assertEqual(args[0], '/bin/bash')
self.assertEqual(args[1], '-l')
self.assertEqual(args[2], '-c')
self.assertEqual(args[3], 'ok\n')
utils.os.execl = execl
s.__exit__('type', 'value', 'tracebook')
## Instruction:
Test isexecutable check in utils.Shell
## Code After:
import helper
from rock import utils
from rock.exceptions import ConfigError
class UtilsTestCase(helper.unittest.TestCase):
def test_shell(self):
utils.Shell.run = lambda self: self
s = utils.Shell()
self.assertTrue(isinstance(s.__enter__(), utils.Shell))
s.write('ok')
s.__exit__(None, None, None)
self.assertEqual(s.stdin.getvalue(), 'ok\n')
def execl(*args):
self.assertEqual(len(args), 4)
self.assertEqual(args[0], '/bin/bash')
self.assertEqual(args[1], '-l')
self.assertEqual(args[2], '-c')
self.assertEqual(args[3], 'ok\n')
utils.os.execl = execl
s.__exit__('type', 'value', 'tracebook')
def test_noshell(self):
utils.ROCK_SHELL = '/tmp/hopefully-no-exists'
s = utils.Shell()
s.__enter__()
self.assertRaises(ConfigError, s.__exit__, 'type', 'value', 'tracebook')
|
# ... existing code ...
import helper
from rock import utils
from rock.exceptions import ConfigError
class UtilsTestCase(helper.unittest.TestCase):
# ... modified code ...
self.assertEqual(args[3], 'ok\n')
utils.os.execl = execl
s.__exit__('type', 'value', 'tracebook')
def test_noshell(self):
utils.ROCK_SHELL = '/tmp/hopefully-no-exists'
s = utils.Shell()
s.__enter__()
self.assertRaises(ConfigError, s.__exit__, 'type', 'value', 'tracebook')
# ... rest of the code ...
|
9b0618d3b52c74bf2abd65a581807087cbaa2ca4
|
grammpy_transforms/NongeneratingSymbolsRemove/nongeneratingSymbolsRemove.py
|
grammpy_transforms/NongeneratingSymbolsRemove/nongeneratingSymbolsRemove.py
|
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
|
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
|
Switch to new version of grammpy (1.1.2) and use copy method
|
Switch to new version of grammpy (1.1.2) and use copy method
|
Python
|
mit
|
PatrikValkovic/grammpy
|
python
|
## Code Before:
from grammpy import Grammar
def _copy_grammar(grammar):
return Grammar(terminals=(item.s for item in grammar.terms()),
nonterminals=grammar.nonterms(),
rules=grammar.rules(),
start_symbol=grammar.start_get())
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
## Instruction:
Switch to new version of grammpy (1.1.2) and use copy method
## Code After:
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
grammar = _copy_grammar(grammar)
generates = set(item.s for item in grammar.terms())
while True:
additional = generates.copy()
for rule in grammar.rules():
rightPart = rule.right
allIn = True
for symbol in rightPart:
if symbol not in generates:
allIn = False
if not allIn:
continue
for symbol in rule.left:
additional.add(symbol)
if additional == generates:
break
generates = additional
allNonterms = list(grammar.nonterms())
for nonterm in allNonterms:
if nonterm not in generates:
grammar.remove_nonterm(nonterm)
return grammar
|
// ... existing code ...
from copy import copy
from grammpy import Grammar
def _copy_grammar(grammar):
return copy(grammar)
def remove_nongenerating_symbol(grammar: Grammar, transform_grammar=False) -> Grammar:
if transform_grammar is False:
// ... rest of the code ...
|
7bc5ce4a79c61ab7238b188f9af48f41ff1392f9
|
fuzz/fuzzer.h
|
fuzz/fuzzer.h
|
/*
* Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* https://www.openssl.org/source/license.html
* or in the file LICENSE in the source distribution.
*/
#include <stdint.h> /* for uint8_t */
#include <stddef.h> /* for size_t */
int FuzzerTestOneInput(const uint8_t *buf, size_t len);
int FuzzerInitialize(int *argc, char ***argv);
void FuzzerCleanup(void);
void FuzzerSetRand(void);
void FuzzerClearRand(void);
|
/*
* Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* https://www.openssl.org/source/license.html
* or in the file LICENSE in the source distribution.
*/
#include <stddef.h> /* for size_t */
#include <openssl/e_os2.h> /* for uint8_t */
int FuzzerTestOneInput(const uint8_t *buf, size_t len);
int FuzzerInitialize(int *argc, char ***argv);
void FuzzerCleanup(void);
void FuzzerSetRand(void);
void FuzzerClearRand(void);
|
Use <openssl/e_os2.h> rather than <stdint.h>
|
Use <openssl/e_os2.h> rather than <stdint.h>
<stdint.h> is C99, which means that on older compiler, it can't be included.
We have code in <openssl/e_os2.h> that compensates.
Reviewed-by: Matt Caswell <[email protected]>
Reviewed-by: Tomas Mraz <[email protected]>
(Merged from https://github.com/openssl/openssl/pull/19697)
|
C
|
apache-2.0
|
openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl
|
c
|
## Code Before:
/*
* Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* https://www.openssl.org/source/license.html
* or in the file LICENSE in the source distribution.
*/
#include <stdint.h> /* for uint8_t */
#include <stddef.h> /* for size_t */
int FuzzerTestOneInput(const uint8_t *buf, size_t len);
int FuzzerInitialize(int *argc, char ***argv);
void FuzzerCleanup(void);
void FuzzerSetRand(void);
void FuzzerClearRand(void);
## Instruction:
Use <openssl/e_os2.h> rather than <stdint.h>
<stdint.h> is C99, which means that on older compilers, it can't be included.
We have code in <openssl/e_os2.h> that compensates.
Reviewed-by: Matt Caswell <[email protected]>
Reviewed-by: Tomas Mraz <[email protected]>
(Merged from https://github.com/openssl/openssl/pull/19697)
## Code After:
/*
* Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* https://www.openssl.org/source/license.html
* or in the file LICENSE in the source distribution.
*/
#include <stddef.h> /* for size_t */
#include <openssl/e_os2.h> /* for uint8_t */
int FuzzerTestOneInput(const uint8_t *buf, size_t len);
int FuzzerInitialize(int *argc, char ***argv);
void FuzzerCleanup(void);
void FuzzerSetRand(void);
void FuzzerClearRand(void);
|
// ... existing code ...
* or in the file LICENSE in the source distribution.
*/
#include <stddef.h> /* for size_t */
#include <openssl/e_os2.h> /* for uint8_t */
int FuzzerTestOneInput(const uint8_t *buf, size_t len);
int FuzzerInitialize(int *argc, char ***argv);
// ... rest of the code ...
|