commit (stringlengths 40–40) | old_file (stringlengths 4–234) | new_file (stringlengths 4–234) | old_contents (stringlengths 10–3.01k) | new_contents (stringlengths 19–3.38k) | subject (stringlengths 16–736) | message (stringlengths 17–2.63k) | lang (stringclasses 4 values) | license (stringclasses 13 values) | repos (stringlengths 5–82.6k) | config (stringclasses 4 values) | content (stringlengths 134–4.41k) | fuzzy_diff (stringlengths 29–3.44k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
6620032e9f8574c3e1dad37c111040eca570a751
|
features/memberships/models.py
|
features/memberships/models.py
|
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
class Membership(models.Model):
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Meta:
unique_together = ('group', 'member')
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
from . import querysets
class Membership(models.Model):
class Meta:
unique_together = ('group', 'member')
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
Add queryset for ordering memberships by activity
|
Add queryset for ordering memberships by activity
|
Python
|
agpl-3.0
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
python
|
## Code Before:
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
class Membership(models.Model):
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Meta:
unique_together = ('group', 'member')
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
## Instruction:
Add queryset for ordering memberships by activity
## Code After:
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
from . import querysets
class Membership(models.Model):
class Meta:
unique_together = ('group', 'member')
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Application(models.Model):
group = models.ForeignKey('groups.Group', related_name='applications')
contributions = contenttypes.GenericRelation(
'contributions.Contribution',
content_type_field='contribution_type',
object_id_field='contribution_id',
related_query_name='membership_application')
@property
def contribution(self):
return self.contributions.first()
|
...
from django.contrib.contenttypes import fields as contenttypes
from django.db import models
from . import querysets
class Membership(models.Model):
class Meta:
unique_together = ('group', 'member')
created_by = models.ForeignKey(
'gestalten.Gestalt', related_name='memberships_created')
date_joined = models.DateField(auto_now_add=True)
...
group = models.ForeignKey('groups.Group', related_name='memberships')
member = models.ForeignKey('gestalten.Gestalt', related_name='memberships')
objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()
def __str__(self):
return "%s is member of %s since %s" % (
str(self.member.user.get_username()),
str(self.group.slug), str(self.date_joined)
)
class Application(models.Model):
...
|
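The new contents in the row above import `from . import querysets` and use `querysets.MembershipQuerySet`, which is not part of the shown diff. A minimal sketch of what such an activity-ordered queryset might look like follows; the related field names are assumptions for illustration, not taken from the repository.

```python
# Hypothetical sketch of features/memberships/querysets.py; not shown in the commit above.
# The lookup path "member__contributions__time_created" is an assumed field name.
from django.db import models


class MembershipQuerySet(models.QuerySet):
    def order_by_activity(self):
        # Annotate each membership with the member's latest contribution time,
        # then put the most recently active members first.
        return self.annotate(
            last_activity=models.Max("member__contributions__time_created")
        ).order_by("-last_activity")
```

With `objects = models.Manager.from_queryset(querysets.MembershipQuerySet)()` as in the new contents, such a method would be callable as `Membership.objects.order_by_activity()`.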
9d68808a363ad00c3fc0b0907d625e5c75bdb8ae
|
ptt_preproc_sampling.py
|
ptt_preproc_sampling.py
|
from pathlib import Path
from random import shuffle
from shutil import copy
# configs
N = 10000
SAMPLED_DIR_PATH = Path('sampled/')
# mkdir if doesn't exist
if not SAMPLED_DIR_PATH.exists():
SAMPLED_DIR_PATH.mkdir()
# sample and copy
paths = [p for p in Path('preprocessed/').iterdir()]
shuffle(paths)
for p in paths[:N]:
copy(str(p), str(SAMPLED_DIR_PATH / p.name))
|
from pathlib import Path
from random import sample
from os import remove
# configs
N = 10000
# remove unsampled
paths = [path for path in Path('preprocessed/').iterdir()]
paths_len = len(paths)
if paths_len <= N:
raise RuntimeError('file count {:,} <= N {:,}'.format(paths_len, N))
for path in sample(paths, paths_len-N):
remove(str(path))
|
Use removing rather than copying
|
Use removing rather than copying
|
Python
|
mit
|
moskytw/mining-news
|
python
|
## Code Before:
from pathlib import Path
from random import shuffle
from shutil import copy
# configs
N = 10000
SAMPLED_DIR_PATH = Path('sampled/')
# mkdir if doesn't exist
if not SAMPLED_DIR_PATH.exists():
SAMPLED_DIR_PATH.mkdir()
# sample and copy
paths = [p for p in Path('preprocessed/').iterdir()]
shuffle(paths)
for p in paths[:N]:
copy(str(p), str(SAMPLED_DIR_PATH / p.name))
## Instruction:
Use removing rather than copying
## Code After:
from pathlib import Path
from random import sample
from os import remove
# configs
N = 10000
# remove unsampled
paths = [path for path in Path('preprocessed/').iterdir()]
paths_len = len(paths)
if paths_len <= N:
raise RuntimeError('file count {:,} <= N {:,}'.format(paths_len, N))
for path in sample(paths, paths_len-N):
remove(str(path))
|
// ... existing code ...
from pathlib import Path
from random import sample
from os import remove
# configs
N = 10000
# remove unsampled
paths = [path for path in Path('preprocessed/').iterdir()]
paths_len = len(paths)
if paths_len <= N:
raise RuntimeError('file count {:,} <= N {:,}'.format(paths_len, N))
for path in sample(paths, paths_len-N):
remove(str(path))
// ... rest of the code ...
|
cb63e07c588dc7030f3f41812dea73f3571e449a
|
test/Frontend/diagnostic-name.c
|
test/Frontend/diagnostic-name.c
|
// RUN: %clang -Wunused-parameter -fdiagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 1
// RUN: %clang -Wunused-parameter -fno-diagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 0
int main(int argc, char *argv[]) {
return argc;
}
|
// RUN: %clang -fsyntax-only -Wunused-parameter -fdiagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 1
// RUN: %clang -fsyntax-only -Wunused-parameter -fno-diagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 0
int main(int argc, char *argv[]) {
return argc;
}
|
Stop leaving a.out files around.
|
Stop leaving a.out files around.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@131396 91177308-0d34-0410-b5e6-96231b3b80d8
|
C
|
apache-2.0
|
apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang
|
c
|
## Code Before:
// RUN: %clang -Wunused-parameter -fdiagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 1
// RUN: %clang -Wunused-parameter -fno-diagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 0
int main(int argc, char *argv[]) {
return argc;
}
## Instruction:
Stop leaving a.out files around.
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@131396 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %clang -fsyntax-only -Wunused-parameter -fdiagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 1
// RUN: %clang -fsyntax-only -Wunused-parameter -fno-diagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 0
int main(int argc, char *argv[]) {
return argc;
}
|
...
// RUN: %clang -fsyntax-only -Wunused-parameter -fdiagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 1
// RUN: %clang -fsyntax-only -Wunused-parameter -fno-diagnostics-show-name %s 2>&1 | grep "\[warn_unused_parameter\]" | count 0
int main(int argc, char *argv[]) {
return argc;
}
...
|
afd6b5b29b60c59689e0a1be38a0483a7e4db312
|
miniraf/__init__.py
|
miniraf/__init__.py
|
import argparse
import astropy.io.fits as fits
import numpy as np
import calc
import combine
if __name__=="__main__":
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
args = argparser.parse_args()
print(args)
args.func(args)
|
import argparse
import calc
import combine
from combine import stack_fits_data
from calc import load_fits_data
def _argparse():
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
return argparser.parse_args()
def main():
args = _argparse()
args.func(args)
if __name__=="__main__":
main()
|
Create main() entry point for final script
|
Create main() entry point for final script
Signed-off-by: Lizhou Sha <[email protected]>
|
Python
|
mit
|
vulpicastor/miniraf
|
python
|
## Code Before:
import argparse
import astropy.io.fits as fits
import numpy as np
import calc
import combine
if __name__=="__main__":
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
args = argparser.parse_args()
print(args)
args.func(args)
## Instruction:
Create main() entry point for final script
Signed-off-by: Lizhou Sha <[email protected]>
## Code After:
import argparse
import calc
import combine
from combine import stack_fits_data
from calc import load_fits_data
def _argparse():
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
return argparser.parse_args()
def main():
args = _argparse()
args.func(args)
if __name__=="__main__":
main()
|
// ... existing code ...
import argparse
import calc
import combine
from combine import stack_fits_data
from calc import load_fits_data
def _argparse():
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
return argparser.parse_args()
def main():
args = _argparse()
args.func(args)
if __name__=="__main__":
main()
// ... rest of the code ...
|
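The `main()` added in the row above is described as the entry point "for final script", but the commit shows no packaging changes. Purely as a hedged illustration, a console script pointing at that entry point would typically be declared roughly like this; the package and script names are assumed from the `miniraf/__init__.py` path.

```python
# Hypothetical setup.py fragment; not part of the commit shown above.
from setuptools import find_packages, setup

setup(
    name="miniraf",
    packages=find_packages(),
    entry_points={
        "console_scripts": [
            # Installs a `miniraf` command that calls miniraf.main()
            "miniraf = miniraf:main",
        ],
    },
)
```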
2f5c14a8f0c532f5a3f1d2a86d393cb59bd54d9f
|
web/src/main/java/uk/ac/ebi/atlas/commons/writers/FileTsvWriterBuilder.java
|
web/src/main/java/uk/ac/ebi/atlas/commons/writers/FileTsvWriterBuilder.java
|
package uk.ac.ebi.atlas.commons.writers;
import org.springframework.context.annotation.Scope;
import uk.ac.ebi.atlas.commons.readers.TsvReader;
import uk.ac.ebi.atlas.commons.readers.impl.TsvReaderImpl;
import uk.ac.ebi.atlas.commons.writers.impl.TsvWriterImpl;
import javax.inject.Named;
import java.io.*;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.MessageFormat;
@Named
@Scope("prototype")
public class FileTsvWriterBuilder {
private String experimentAccession;
private String tsvFilePathTemplate;
private boolean append = true;
public FileTsvWriterBuilder() {
}
public FileTsvWriterBuilder withExperimentAccession(String experimentAccession) {
this.experimentAccession = experimentAccession;
return this;
}
public FileTsvWriterBuilder forTsvFilePathTemplate(String tsvFilePathTemplate) {
this.tsvFilePathTemplate = tsvFilePathTemplate;
return this;
}
public FileTsvWriterBuilder withAppend(boolean append) {
this.append = append;
return this;
}
public TsvWriter build() {
String tsvFilePath = MessageFormat.format(tsvFilePathTemplate, experimentAccession);
try {
return new TsvWriterImpl(new OutputStreamWriter(new FileOutputStream(new File(tsvFilePath), append)));
} catch (IOException e) {
throw new IllegalStateException("Cannot write TSV file to path " + tsvFilePath.toString(), e);
}
}
}
|
package uk.ac.ebi.atlas.commons.writers;
import org.springframework.context.annotation.Scope;
import uk.ac.ebi.atlas.commons.writers.impl.TsvWriterImpl;
import javax.inject.Named;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.MessageFormat;
@Named
@Scope("prototype")
public class FileTsvWriterBuilder {
private String experimentAccession;
private String tsvFilePathTemplate;
private boolean append = true;
public FileTsvWriterBuilder() {
}
public FileTsvWriterBuilder withExperimentAccession(String experimentAccession) {
this.experimentAccession = experimentAccession;
return this;
}
public FileTsvWriterBuilder forTsvFilePathTemplate(String tsvFilePathTemplate) {
this.tsvFilePathTemplate = tsvFilePathTemplate;
return this;
}
public FileTsvWriterBuilder withAppend(boolean append) {
this.append = append;
return this;
}
public TsvWriter build() {
String tsvFilePath = MessageFormat.format(tsvFilePathTemplate, experimentAccession);
try {
return new TsvWriterImpl(new OutputStreamWriter(new FileOutputStream(new File(tsvFilePath), append)));
} catch (IOException e) {
throw new IllegalStateException("Cannot write TSV file to path " + tsvFilePath, e);
}
}
}
|
Clean imports and remove unnecessary toString()
|
Clean imports and remove unnecessary toString()
|
Java
|
apache-2.0
|
gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas
|
java
|
## Code Before:
package uk.ac.ebi.atlas.commons.writers;
import org.springframework.context.annotation.Scope;
import uk.ac.ebi.atlas.commons.readers.TsvReader;
import uk.ac.ebi.atlas.commons.readers.impl.TsvReaderImpl;
import uk.ac.ebi.atlas.commons.writers.impl.TsvWriterImpl;
import javax.inject.Named;
import java.io.*;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.MessageFormat;
@Named
@Scope("prototype")
public class FileTsvWriterBuilder {
private String experimentAccession;
private String tsvFilePathTemplate;
private boolean append = true;
public FileTsvWriterBuilder() {
}
public FileTsvWriterBuilder withExperimentAccession(String experimentAccession) {
this.experimentAccession = experimentAccession;
return this;
}
public FileTsvWriterBuilder forTsvFilePathTemplate(String tsvFilePathTemplate) {
this.tsvFilePathTemplate = tsvFilePathTemplate;
return this;
}
public FileTsvWriterBuilder withAppend(boolean append) {
this.append = append;
return this;
}
public TsvWriter build() {
String tsvFilePath = MessageFormat.format(tsvFilePathTemplate, experimentAccession);
try {
return new TsvWriterImpl(new OutputStreamWriter(new FileOutputStream(new File(tsvFilePath), append)));
} catch (IOException e) {
throw new IllegalStateException("Cannot write TSV file to path " + tsvFilePath.toString(), e);
}
}
}
## Instruction:
Clean imports and remove unnecessary toString()
## Code After:
package uk.ac.ebi.atlas.commons.writers;
import org.springframework.context.annotation.Scope;
import uk.ac.ebi.atlas.commons.writers.impl.TsvWriterImpl;
import javax.inject.Named;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.MessageFormat;
@Named
@Scope("prototype")
public class FileTsvWriterBuilder {
private String experimentAccession;
private String tsvFilePathTemplate;
private boolean append = true;
public FileTsvWriterBuilder() {
}
public FileTsvWriterBuilder withExperimentAccession(String experimentAccession) {
this.experimentAccession = experimentAccession;
return this;
}
public FileTsvWriterBuilder forTsvFilePathTemplate(String tsvFilePathTemplate) {
this.tsvFilePathTemplate = tsvFilePathTemplate;
return this;
}
public FileTsvWriterBuilder withAppend(boolean append) {
this.append = append;
return this;
}
public TsvWriter build() {
String tsvFilePath = MessageFormat.format(tsvFilePathTemplate, experimentAccession);
try {
return new TsvWriterImpl(new OutputStreamWriter(new FileOutputStream(new File(tsvFilePath), append)));
} catch (IOException e) {
throw new IllegalStateException("Cannot write TSV file to path " + tsvFilePath, e);
}
}
}
|
...
package uk.ac.ebi.atlas.commons.writers;
import org.springframework.context.annotation.Scope;
import uk.ac.ebi.atlas.commons.writers.impl.TsvWriterImpl;
import javax.inject.Named;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.MessageFormat;
@Named
...
try {
return new TsvWriterImpl(new OutputStreamWriter(new FileOutputStream(new File(tsvFilePath), append)));
} catch (IOException e) {
throw new IllegalStateException("Cannot write TSV file to path " + tsvFilePath, e);
}
}
...
|
fcbf1edd7bc47abf427f54f0bbb8a73d55079074
|
production/source/se/ericthelin/fractions/Fraction.java
|
production/source/se/ericthelin/fractions/Fraction.java
|
package se.ericthelin.fractions;
import static java.util.Objects.requireNonNull;
public class Fraction {
public static Fraction of(String text) {
return new Fraction(text);
}
private final String text;
private Fraction(String text) {
this.text = requireNonNull(text);
}
@Override
public String toString() {
return text;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Fraction)) {
return false;
}
Fraction other = (Fraction) obj;
return text.equals(other.text);
}
public Fraction plus(Fraction term) {
throw new UnsupportedOperationException("Not yet implemented");
}
}
|
package se.ericthelin.fractions;
import static java.util.Objects.requireNonNull;
public class Fraction {
public static Fraction of(String text) {
return new Fraction(text);
}
private final String text;
private Fraction(String text) {
this.text = requireNonNull(text);
}
@Override
public String toString() {
return text;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof Fraction)) {
return false;
}
Fraction other = (Fraction) obj;
return text.equals(other.text);
}
public Fraction plus(Fraction term) {
throw new UnsupportedOperationException("Not yet implemented");
}
}
|
Optimize equals check for same instance
|
Optimize equals check for same instance
|
Java
|
mit
|
eric-thelin/fractions
|
java
|
## Code Before:
package se.ericthelin.fractions;
import static java.util.Objects.requireNonNull;
public class Fraction {
public static Fraction of(String text) {
return new Fraction(text);
}
private final String text;
private Fraction(String text) {
this.text = requireNonNull(text);
}
@Override
public String toString() {
return text;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Fraction)) {
return false;
}
Fraction other = (Fraction) obj;
return text.equals(other.text);
}
public Fraction plus(Fraction term) {
throw new UnsupportedOperationException("Not yet implemented");
}
}
## Instruction:
Optimize equals check for same instance
## Code After:
package se.ericthelin.fractions;
import static java.util.Objects.requireNonNull;
public class Fraction {
public static Fraction of(String text) {
return new Fraction(text);
}
private final String text;
private Fraction(String text) {
this.text = requireNonNull(text);
}
@Override
public String toString() {
return text;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof Fraction)) {
return false;
}
Fraction other = (Fraction) obj;
return text.equals(other.text);
}
public Fraction plus(Fraction term) {
throw new UnsupportedOperationException("Not yet implemented");
}
}
|
...
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof Fraction)) {
return false;
}
...
|
f9d17e97115d914c9ed231630d01a6d724378f15
|
zou/app/blueprints/source/csv/persons.py
|
zou/app/blueprints/source/csv/persons.py
|
from zou.app.blueprints.source.csv.base import BaseCsvImportResource
from zou.app.models.person import Person
from zou.app.utils import auth, permissions
from sqlalchemy.exc import IntegrityError
class PersonsCsvImportResource(BaseCsvImportResource):
def check_permissions(self):
return permissions.check_admin_permissions()
def import_row(self, row):
first_name = row["First Name"]
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
try:
password = auth.encrypt_password("default")
person = Person.get_by(email=email)
if person is None:
person = Person.create(
email=email,
password=password,
first_name=first_name,
last_name=last_name,
phone=phone
)
else:
person.update({
"first_name": first_name,
"last_name": last_name,
"phone": phone
})
except IntegrityError:
person = Person.get_by(email=email)
return person.serialize_safe()
|
from zou.app.blueprints.source.csv.base import BaseCsvImportResource
from zou.app.models.person import Person
from zou.app.utils import auth, permissions
from sqlalchemy.exc import IntegrityError
class PersonsCsvImportResource(BaseCsvImportResource):
def check_permissions(self):
return permissions.check_admin_permissions()
def import_row(self, row):
first_name = row["First Name"]
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
role = row.get("Role", None)
if role == "Studio Manager":
role = "admin"
elif role == "Supervisor":
role = "manager"
elif role == "Client":
role = "client"
if role is not None and \
len(role) > 0 and \
role not in ["admin", "manager"]:
role = "user"
try:
password = auth.encrypt_password("default")
person = Person.get_by(email=email)
if person is None:
person = Person.create(
email=email,
password=password,
first_name=first_name,
last_name=last_name,
phone=phone,
role=role
)
else:
data = {
"first_name": first_name,
"last_name": last_name,
"phone": phone
}
if role is not None and len(role) > 0:
data["role"] = role
person.update(data)
except IntegrityError:
person = Person.get_by(email=email)
return person.serialize_safe()
|
Allow to import roles when importing people
|
Allow to import roles when importing people
|
Python
|
agpl-3.0
|
cgwire/zou
|
python
|
## Code Before:
from zou.app.blueprints.source.csv.base import BaseCsvImportResource
from zou.app.models.person import Person
from zou.app.utils import auth, permissions
from sqlalchemy.exc import IntegrityError
class PersonsCsvImportResource(BaseCsvImportResource):
def check_permissions(self):
return permissions.check_admin_permissions()
def import_row(self, row):
first_name = row["First Name"]
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
try:
password = auth.encrypt_password("default")
person = Person.get_by(email=email)
if person is None:
person = Person.create(
email=email,
password=password,
first_name=first_name,
last_name=last_name,
phone=phone
)
else:
person.update({
"first_name": first_name,
"last_name": last_name,
"phone": phone
})
except IntegrityError:
person = Person.get_by(email=email)
return person.serialize_safe()
## Instruction:
Allow to import roles when importing people
## Code After:
from zou.app.blueprints.source.csv.base import BaseCsvImportResource
from zou.app.models.person import Person
from zou.app.utils import auth, permissions
from sqlalchemy.exc import IntegrityError
class PersonsCsvImportResource(BaseCsvImportResource):
def check_permissions(self):
return permissions.check_admin_permissions()
def import_row(self, row):
first_name = row["First Name"]
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
role = row.get("Role", None)
if role == "Studio Manager":
role = "admin"
elif role == "Supervisor":
role = "manager"
elif role == "Client":
role = "client"
if role is not None and \
len(role) > 0 and \
role not in ["admin", "manager"]:
role = "user"
try:
password = auth.encrypt_password("default")
person = Person.get_by(email=email)
if person is None:
person = Person.create(
email=email,
password=password,
first_name=first_name,
last_name=last_name,
phone=phone,
role=role
)
else:
data = {
"first_name": first_name,
"last_name": last_name,
"phone": phone
}
if role is not None and len(role) > 0:
data["role"] = role
person.update(data)
except IntegrityError:
person = Person.get_by(email=email)
return person.serialize_safe()
|
...
last_name = row["Last Name"]
email = row["Email"]
phone = row["Phone"]
role = row.get("Role", None)
if role == "Studio Manager":
role = "admin"
elif role == "Supervisor":
role = "manager"
elif role == "Client":
role = "client"
if role is not None and \
len(role) > 0 and \
role not in ["admin", "manager"]:
role = "user"
try:
password = auth.encrypt_password("default")
...
password=password,
first_name=first_name,
last_name=last_name,
phone=phone,
role=role
)
else:
data = {
"first_name": first_name,
"last_name": last_name,
"phone": phone
}
if role is not None and len(role) > 0:
data["role"] = role
person.update(data)
except IntegrityError:
person = Person.get_by(email=email)
...
|
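The role handling added in the row above is a chain of if/elif branches plus a fallback to "user". As an illustration of the same mapping only (not something the commit proposes), the lookup can be written with a dict; the sketch below is intended to match the shown behaviour, including "Client" ultimately normalising to "user".

```python
# Equivalent sketch of the role normalisation shown above; illustration only.
ROLE_MAP = {
    "Studio Manager": "admin",
    "Supervisor": "manager",
    "Client": "client",
}


def normalize_role(raw_role):
    role = ROLE_MAP.get(raw_role, raw_role)
    # Any non-empty role other than "admin" or "manager" falls back to "user",
    # which (as in the original) also turns "client" into "user".
    if role is not None and len(role) > 0 and role not in ("admin", "manager"):
        role = "user"
    return role
```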
fd7027ae889d61949998ea02fbb56dbc8e6005a4
|
polling_stations/apps/data_importers/management/commands/import_cheltenham.py
|
polling_stations/apps/data_importers/management/commands/import_cheltenham.py
|
from data_importers.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = "CHT"
addresses_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
stations_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.housepostcode in [
"GL50 2RF",
"GL52 6RN",
"GL52 2ES",
"GL53 7AJ",
"GL50 3RB",
"GL53 0HL",
"GL50 2DZ",
]:
return None
return super().address_record_to_dict(record)
|
from data_importers.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = "CHT"
addresses_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
stations_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.housepostcode in [
"GL50 2RF",
"GL52 6RN",
"GL52 2ES",
"GL53 7AJ",
"GL50 3RB",
"GL53 0HL",
"GL50 2DZ",
]:
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.pollingstationnumber == "191":
record = record._replace(pollingstationaddress_1="")
return super().station_record_to_dict(record)
|
Fix to CHT station name
|
Fix to CHT station name
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
python
|
## Code Before:
from data_importers.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = "CHT"
addresses_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
stations_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.housepostcode in [
"GL50 2RF",
"GL52 6RN",
"GL52 2ES",
"GL53 7AJ",
"GL50 3RB",
"GL53 0HL",
"GL50 2DZ",
]:
return None
return super().address_record_to_dict(record)
## Instruction:
Fix to CHT station name
## Code After:
from data_importers.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = "CHT"
addresses_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
stations_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.housepostcode in [
"GL50 2RF",
"GL52 6RN",
"GL52 2ES",
"GL53 7AJ",
"GL50 3RB",
"GL53 0HL",
"GL50 2DZ",
]:
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.pollingstationnumber == "191":
record = record._replace(pollingstationaddress_1="")
return super().station_record_to_dict(record)
|
...
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.pollingstationnumber == "191":
record = record._replace(pollingstationaddress_1="")
return super().station_record_to_dict(record)
...
|
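The fix in the row above blanks the station address with `record._replace(...)`, which is the `collections.namedtuple` API for producing a modified copy of an immutable record; the concrete record type used by the importer is not shown here. A small standalone illustration:

```python
# Standalone illustration of namedtuple._replace; the real importer record type is not shown above.
from collections import namedtuple

Station = namedtuple("Station", ["pollingstationnumber", "pollingstationaddress_1"])

rec = Station(pollingstationnumber="191", pollingstationaddress_1="Some hall name")
rec = rec._replace(pollingstationaddress_1="")  # returns a new tuple; the original is unchanged
print(rec)  # Station(pollingstationnumber='191', pollingstationaddress_1='')
```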
b773186f4e39e531e162e3d56a129a21129864e7
|
bookmarks/views.py
|
bookmarks/views.py
|
from rest_framework import serializers
from rest_framework_json_api.views import ModelViewSet
from .models import Collection, Item
class ItemSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ["id", "key", "value", "kind", "row", "collection"]
class ItemViewSet(ModelViewSet):
queryset = Item.objects.all()
serializer_class = ItemSerializer
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ["key", "column", "row", "id"]
class CollectionViewSet(ModelViewSet):
queryset = Collection.objects.all()
serializer_class = CollectionSerializer
|
from rest_framework import serializers
from rest_framework_json_api.views import ModelViewSet
from .models import Collection, Item
class ItemSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ["id", "key", "value", "kind", "row", "collection"]
class ItemViewSet(ModelViewSet):
queryset = Item.objects.all()
serializer_class = ItemSerializer
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ["key", "column", "row", "id", "item_set"]
class CollectionViewSet(ModelViewSet):
queryset = Collection.objects.all()
serializer_class = CollectionSerializer
|
Add item_set to collections response
|
Add item_set to collections response
|
Python
|
mit
|
GSC-RNSIT/bookmark-manager,GSC-RNSIT/bookmark-manager,rohithpr/bookmark-manager,rohithpr/bookmark-manager
|
python
|
## Code Before:
from rest_framework import serializers
from rest_framework_json_api.views import ModelViewSet
from .models import Collection, Item
class ItemSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ["id", "key", "value", "kind", "row", "collection"]
class ItemViewSet(ModelViewSet):
queryset = Item.objects.all()
serializer_class = ItemSerializer
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ["key", "column", "row", "id"]
class CollectionViewSet(ModelViewSet):
queryset = Collection.objects.all()
serializer_class = CollectionSerializer
## Instruction:
Add item_set to collections response
## Code After:
from rest_framework import serializers
from rest_framework_json_api.views import ModelViewSet
from .models import Collection, Item
class ItemSerializer(serializers.ModelSerializer):
class Meta:
model = Item
fields = ["id", "key", "value", "kind", "row", "collection"]
class ItemViewSet(ModelViewSet):
queryset = Item.objects.all()
serializer_class = ItemSerializer
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ["key", "column", "row", "id", "item_set"]
class CollectionViewSet(ModelViewSet):
queryset = Collection.objects.all()
serializer_class = CollectionSerializer
|
...
class CollectionSerializer(serializers.ModelSerializer):
class Meta:
model = Collection
fields = ["key", "column", "row", "id", "item_set"]
class CollectionViewSet(ModelViewSet):
...
|
f70a1ae6d86b5e789b5f6120db2772ec492bc088
|
mardek_sol_reader.py
|
mardek_sol_reader.py
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
Fix to use binary read format
|
Fix to use binary read format
|
Python
|
apache-2.0
|
jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random,jbzdarkid/Random
|
python
|
## Code Before:
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path).read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
## Instruction:
Fix to use binary read format
## Code After:
from struct import unpack
from os import walk, sep
from os.path import expanduser
from re import search
shared_objects_dirs = [
'Library/Application Support/Google/Chrome/Default/Pepper Data/Shockwave Flash/WritableRoot/#SharedObjects',
'Library/Preferences/Macromedia/Flash Player/#SharedObjects',
'AppData\Local\Google\Chrome',
'AppData\Macromedia\Flash Player\#SharedObjects',
]
savefiles = []
for shared_objects_dir in shared_objects_dirs:
for root, dirs, files in walk(expanduser('~')+sep+shared_objects_dir):
for file in files:
if file[:13] == 'MARDEKv3__sg_':
savefiles.append((root.split(sep)[-1], int(file[13:-4]), root+sep+file))
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
|
// ... existing code ...
savefiles.sort()
for dir, file_num, path in savefiles:
f = open(path, 'rb').read()
# 8 represents array, 0003 means 3 elements
m = search('playtime\x08\x00\x00\x00\x03(.{36})', f)
if m:
print 'Save file %d from "%s":' % (file_num, dir),
print '\t%d:%02d:%02d' % unpack('>4xd4xd4xd', m.group(1))
// ... rest of the code ...
|
e90c7d034f070361893f77d7a257640d647be0c7
|
mbuild/tests/test_xyz.py
|
mbuild/tests/test_xyz.py
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
import numpy as np
import pytest
import mbuild as mb
from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_bad_input(self, ethane):
with pytest.raises(ValueError):
assert isinstance(ethane, mb.Compound)
write_xyz(ethane, 'compound.xyz')
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
Add test to ensure write_xyz does not directly take in compound
|
Add test to ensure write_xyz does not directly take in compound
|
Python
|
mit
|
iModels/mbuild,iModels/mbuild
|
python
|
## Code Before:
import numpy as np
import pytest
import mbuild as mb
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
## Instruction:
Add test to ensure write_xyz does not directly take in compound
## Code After:
import numpy as np
import pytest
import mbuild as mb
from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
class TestXYZ(BaseTest):
def test_load_no_top(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert ethane_in.n_bonds == 0
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_wrong_n_atoms(self):
with pytest.raises(MBuildError):
mb.load(get_fn('too_few_atoms.xyz'))
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_bad_input(self, ethane):
with pytest.raises(ValueError):
assert isinstance(ethane, mb.Compound)
write_xyz(ethane, 'compound.xyz')
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert len(ethane_in.children) == 8
assert set([child.name for child in ethane_in.children]) == {'C', 'H'}
def test_coordinates(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
assert np.allclose(ethane.xyz, ethane_in.xyz)
|
# ... existing code ...
import pytest
import mbuild as mb
from mbuild.formats.xyz import write_xyz
from mbuild.utils.io import get_fn
from mbuild.tests.base_test import BaseTest
from mbuild.exceptions import MBuildError
# ... modified code ...
with pytest.raises(MBuildError):
mb.load(get_fn('too_many_atoms.xyz'))
def test_bad_input(self, ethane):
with pytest.raises(ValueError):
assert isinstance(ethane, mb.Compound)
write_xyz(ethane, 'compound.xyz')
def test_save(self, ethane):
ethane.save(filename='ethane.xyz')
ethane_in = mb.load('ethane.xyz')
# ... rest of the code ...
|
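The new `test_bad_input` in the row above expects `write_xyz` to raise `ValueError` when handed an `mb.Compound` directly. The writer itself is not part of this row, so the guard below is only a hedged sketch of the kind of check such a test implies; the exact input type expected by the real `write_xyz` is an assumption.

```python
# Hypothetical sketch of a type guard inside write_xyz; mBuild's real implementation
# is not shown in the row above, and the expected input type is assumed.
import mbuild as mb


def write_xyz(structure, file_path):
    if isinstance(structure, mb.Compound):
        raise ValueError(
            "write_xyz expects a converted structure, not an mb.Compound; "
            "convert the compound first (e.g. with a to_* conversion method)."
        )
    # ... actual writing would happen here ...
```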
38a4629e034f00c65979910109b34499c077687b
|
plasmapy/utils/import_helpers.py
|
plasmapy/utils/import_helpers.py
|
import importlib
import warnings
from distutils.version import LooseVersion
def check_versions(minimum_versions):
"""Raises an ImportError if a dependent package is not installed
and at the required version number, or provides a warning if the
version of the dependent package cannot be found."""
for module_name in minimum_versions.keys():
minimum_version = LooseVersion(minimum_versions[module_name])
try:
module = importlib.import_module(module_name)
module_version = LooseVersion(module.__version__)
except ImportError:
raise ImportError(f"Unable to import {module_name} while "
"importing PlasmaPy.") from None
except AttributeError: # coveralls: ignore
warnings.warn(f"{module_name} version {minimum_version.vstring} "
"is required for PlasmaPy. However, the version of "
f"{module_name} could not be determined to check if "
"this requirement is met.")
else:
if minimum_version > module_version:
raise ImportError(
f"{module_name} {minimum_version} or newer is required "
"for PlasmaPy. The currently installed version is "
f"{module_version}.") from None
|
import importlib
import warnings
import distutils.version as dv
def check_versions(minimum_versions):
"""Raises an ImportError if a dependent package is not installed
and at the required version number, or provides a warning if the
version of the dependent package cannot be found."""
for module_name in minimum_versions.keys():
minimum_version = dv.LooseVersion(minimum_versions[module_name])
try:
module = importlib.import_module(module_name)
module_version = dv.LooseVersion(module.__version__)
except ImportError:
raise ImportError(f"Unable to import {module_name} while "
"importing PlasmaPy.") from None
except AttributeError: # coveralls: ignore
warnings.warn(f"{module_name} version {minimum_version.vstring} "
"is required for PlasmaPy. However, the version of "
f"{module_name} could not be determined to check if "
"this requirement is met.")
else:
if minimum_version > module_version:
raise ImportError(
f"{module_name} {minimum_version} or newer is required "
"for PlasmaPy. The currently installed version is "
f"{module_version}.") from None
|
Make namespaces more clear in input_helpers.py
|
Make namespaces more clear in input_helpers.py
|
Python
|
bsd-3-clause
|
StanczakDominik/PlasmaPy
|
python
|
## Code Before:
import importlib
import warnings
from distutils.version import LooseVersion
def check_versions(minimum_versions):
"""Raises an ImportError if a dependent package is not installed
and at the required version number, or provides a warning if the
version of the dependent package cannot be found."""
for module_name in minimum_versions.keys():
minimum_version = LooseVersion(minimum_versions[module_name])
try:
module = importlib.import_module(module_name)
module_version = LooseVersion(module.__version__)
except ImportError:
raise ImportError(f"Unable to import {module_name} while "
"importing PlasmaPy.") from None
except AttributeError: # coveralls: ignore
warnings.warn(f"{module_name} version {minimum_version.vstring} "
"is required for PlasmaPy. However, the version of "
f"{module_name} could not be determined to check if "
"this requirement is met.")
else:
if minimum_version > module_version:
raise ImportError(
f"{module_name} {minimum_version} or newer is required "
"for PlasmaPy. The currently installed version is "
f"{module_version}.") from None
## Instruction:
Make namespaces more clear in input_helpers.py
## Code After:
import importlib
import warnings
import distutils.version as dv
def check_versions(minimum_versions):
"""Raises an ImportError if a dependent package is not installed
and at the required version number, or provides a warning if the
version of the dependent package cannot be found."""
for module_name in minimum_versions.keys():
minimum_version = dv.LooseVersion(minimum_versions[module_name])
try:
module = importlib.import_module(module_name)
module_version = dv.LooseVersion(module.__version__)
except ImportError:
raise ImportError(f"Unable to import {module_name} while "
"importing PlasmaPy.") from None
except AttributeError: # coveralls: ignore
warnings.warn(f"{module_name} version {minimum_version.vstring} "
"is required for PlasmaPy. However, the version of "
f"{module_name} could not be determined to check if "
"this requirement is met.")
else:
if minimum_version > module_version:
raise ImportError(
f"{module_name} {minimum_version} or newer is required "
"for PlasmaPy. The currently installed version is "
f"{module_version}.") from None
|
# ... existing code ...
import importlib
import warnings
import distutils.version as dv
def check_versions(minimum_versions):
"""Raises an ImportError if a dependent package is not installed
# ... modified code ...
version of the dependent package cannot be found."""
for module_name in minimum_versions.keys():
minimum_version = dv.LooseVersion(minimum_versions[module_name])
try:
module = importlib.import_module(module_name)
module_version = dv.LooseVersion(module.__version__)
except ImportError:
raise ImportError(f"Unable to import {module_name} while "
"importing PlasmaPy.") from None
# ... rest of the code ...
|
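For context on the row above, `check_versions` takes a mapping from importable module names to minimum version strings and raises `ImportError` when a requirement is unmet. A hedged usage sketch; the packages and versions below are arbitrary examples, not PlasmaPy's actual requirements.

```python
# Illustrative call only; the specific packages and versions are examples.
from plasmapy.utils.import_helpers import check_versions

minimum_versions = {
    "numpy": "1.13.0",
    "astropy": "2.0",
}

check_versions(minimum_versions)  # raises ImportError if a package is missing or too old
```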
595aad8a1bd59ee7dce9b74c752555b0fce03840
|
src/main/java/carpentersblocks/renderer/BlockHandlerCarpentersPressurePlate.java
|
src/main/java/carpentersblocks/renderer/BlockHandlerCarpentersPressurePlate.java
|
package carpentersblocks.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class BlockHandlerCarpentersPressurePlate extends BlockHandlerBase {
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.4375F, 0.0F, 1.0F, 0.5625F, 1.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
}
}
|
package carpentersblocks.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class BlockHandlerCarpentersPressurePlate extends BlockHandlerBase {
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.0F, 0.0F, 1.0F, 0.125F, 1.0F);
Tessellator.instance.addTranslation(0.0F, 0.4365F, 0.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
Tessellator.instance.addTranslation(0.0F, -0.4365F, 0.0F);
}
}
|
Tweak pressure plate inventory rendering.
|
Tweak pressure plate inventory rendering.
|
Java
|
lgpl-2.1
|
Nuchaz/carpentersblocks,Techern/carpentersblocks,Mineshopper/carpentersblocks,burpingdog1/carpentersblocks
|
java
|
## Code Before:
package carpentersblocks.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class BlockHandlerCarpentersPressurePlate extends BlockHandlerBase {
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.4375F, 0.0F, 1.0F, 0.5625F, 1.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
}
}
## Instruction:
Tweak pressure plate inventory rendering.
## Code After:
package carpentersblocks.renderer;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class BlockHandlerCarpentersPressurePlate extends BlockHandlerBase {
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.0F, 0.0F, 1.0F, 0.125F, 1.0F);
Tessellator.instance.addTranslation(0.0F, 0.4365F, 0.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
Tessellator.instance.addTranslation(0.0F, -0.4365F, 0.0F);
}
}
|
# ... existing code ...
import net.minecraft.block.Block;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
# ... modified code ...
@Override
public void renderInventoryBlock(Block block, int metadata, int modelID, RenderBlocks renderBlocks)
{
renderBlocks.setRenderBounds(0.0F, 0.0F, 0.0F, 1.0F, 0.125F, 1.0F);
Tessellator.instance.addTranslation(0.0F, 0.4365F, 0.0F);
super.renderInventoryBlock(block, metadata, modelID, renderBlocks);
Tessellator.instance.addTranslation(0.0F, -0.4365F, 0.0F);
}
}
# ... rest of the code ...
|
a92118d7ee6acde57ab9853186c43a5c6748e8a6
|
tracpro/__init__.py
|
tracpro/__init__.py
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
__version__ = "1.0.0"
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
VERSION = (1, 0, 0, "dev")
def get_version(version):
assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)"
major, minor, micro, state = version
assert isinstance(major, int), "Major version must be an integer."
assert isinstance(minor, int), "Minor version must be an integer."
assert isinstance(micro, int), "Micro version must be an integer."
assert state in ('final', 'dev'), "State must be either final or dev."
if state == 'final':
return "{}.{}.{}".format(major, minor, micro)
else:
return "{}.{}.{}.{}".format(major, minor, micro, state)
__version__ = get_version(VERSION)
|
Use tuple to represent version
|
Use tuple to represent version
|
Python
|
bsd-3-clause
|
rapidpro/tracpro,xkmato/tracpro,xkmato/tracpro,xkmato/tracpro,xkmato/tracpro,rapidpro/tracpro,rapidpro/tracpro
|
python
|
## Code Before:
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
__version__ = "1.0.0"
## Instruction:
Use tuple to represent version
## Code After:
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
VERSION = (1, 0, 0, "dev")
def get_version(version):
assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)"
major, minor, micro, state = version
assert isinstance(major, int), "Major version must be an integer."
assert isinstance(minor, int), "Minor version must be an integer."
assert isinstance(micro, int), "Micro version must be an integer."
assert state in ('final', 'dev'), "State must be either final or dev."
if state == 'final':
return "{}.{}.{}".format(major, minor, micro)
else:
return "{}.{}.{}.{}".format(major, minor, micro, state)
__version__ = get_version(VERSION)
|
...
from .celery import app as celery_app # noqa
VERSION = (1, 0, 0, "dev")
def get_version(version):
assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)"
major, minor, micro, state = version
assert isinstance(major, int), "Major version must be an integer."
assert isinstance(minor, int), "Minor version must be an integer."
assert isinstance(micro, int), "Micro version must be an integer."
assert state in ('final', 'dev'), "State must be either final or dev."
if state == 'final':
return "{}.{}.{}".format(major, minor, micro)
else:
return "{}.{}.{}.{}".format(major, minor, micro, state)
__version__ = get_version(VERSION)
...
|
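As a quick check of the helper introduced in the row above, `get_version` appends the state component only when it is not "final", which follows directly from the shown code:

```python
# Behaviour follows directly from the get_version shown above.
assert get_version((1, 0, 0, "dev")) == "1.0.0.dev"
assert get_version((1, 2, 3, "final")) == "1.2.3"
```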
ecfa18b7f05a23bdc6beab705dc748559eef2873
|
lockdown/decorators.py
|
lockdown/decorators.py
|
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
def lockdown(*args, **kwargs):
"""Define a decorator based on the LockdownMiddleware.
This decorator takes the same arguments as the middleware, but allows a
more granular locking than the middleware.
"""
return decorator_from_middleware_with_args(LockdownMiddleware)(*args,
**kwargs)
|
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
lockdown = decorator_from_middleware_with_args(LockdownMiddleware)
|
Remove wrapping of decorator in a func
|
Remove wrapping of decorator in a func
Growing older, growing wiser ...
This removes the unnecesary wrapping of the decorator in a function
introduced in e4a04c6, as it's not necessary and is less performant than
without.
|
Python
|
bsd-3-clause
|
Dunedan/django-lockdown,Dunedan/django-lockdown
|
python
|
## Code Before:
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
def lockdown(*args, **kwargs):
"""Define a decorator based on the LockdownMiddleware.
This decorator takes the same arguments as the middleware, but allows a
more granular locking than the middleware.
"""
return decorator_from_middleware_with_args(LockdownMiddleware)(*args,
**kwargs)
## Instruction:
Remove wrapping of decorator in a func
Growing older, growing wiser ...
This removes the unnecesary wrapping of the decorator in a function
introduced in e4a04c6, as it's not necessary and is less performant than
without.
## Code After:
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
lockdown = decorator_from_middleware_with_args(LockdownMiddleware)
|
# ... existing code ...
from lockdown.middleware import LockdownMiddleware
lockdown = decorator_from_middleware_with_args(LockdownMiddleware)
# ... rest of the code ...
|
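The change above drops a pass-through wrapper in favour of binding the decorator factory directly. A minimal stand-in sketch of why the two spellings behave the same apart from the extra call layer; make_decorator below is a hypothetical substitute, not Django's real decorator_from_middleware_with_args:
# Hypothetical stand-in for a middleware-based decorator factory.
def make_decorator(*args, **kwargs):
    def decorator(view):
        def wrapped(*view_args, **view_kwargs):
            # Middleware-style processing would run here before the view.
            return view(*view_args, **view_kwargs)
        return wrapped
    return decorator
# Old style: an extra function that only forwards its arguments.
def lockdown_wrapped(*args, **kwargs):
    return make_decorator(*args, **kwargs)
# New style: bind the factory directly; callers see no difference.
lockdown_direct = make_decorator
@lockdown_direct()
def view():
    return "ok"
assert view() == "ok"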
cb82b901e1e554ff668609c19b250971d1c6b441
|
demo/src/main/java/com/logzc/webzic/demo/servlet/ClassLoaderServlet.java
|
demo/src/main/java/com/logzc/webzic/demo/servlet/ClassLoaderServlet.java
|
package com.logzc.webzic.demo.servlet;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by lishuang on 2016/7/7.
*/
@WebServlet("/classloader")
public class ClassLoaderServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse response) throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter out = response.getWriter();
ClassLoader loader = this.getClass().getClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.flush();
out.close();
}
}
|
package com.logzc.webzic.demo.servlet;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by lishuang on 2016/7/7.
*/
@WebServlet("/classloader")
public class ClassLoaderServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse response) throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter out = response.getWriter();
ClassLoader loader = this.getClass().getClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.write("<br/>-------------<br/>");
loader = Thread.currentThread().getContextClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.write("<br/>-------------<br/>");
out.flush();
out.close();
}
}
|
Add another classloader test case.
|
Add another classloader test case.
|
Java
|
apache-2.0
|
logzc/Webzic,logzc/Webzic
|
java
|
## Code Before:
package com.logzc.webzic.demo.servlet;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by lishuang on 2016/7/7.
*/
@WebServlet("/classloader")
public class ClassLoaderServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse response) throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter out = response.getWriter();
ClassLoader loader = this.getClass().getClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.flush();
out.close();
}
}
## Instruction:
Add another classloader test case.
## Code After:
package com.logzc.webzic.demo.servlet;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by lishuang on 2016/7/7.
*/
@WebServlet("/classloader")
public class ClassLoaderServlet extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse response) throws ServletException, IOException {
response.setContentType("text/html");
PrintWriter out = response.getWriter();
ClassLoader loader = this.getClass().getClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.write("<br/>-------------<br/>");
loader = Thread.currentThread().getContextClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.write("<br/>-------------<br/>");
out.flush();
out.close();
}
}
|
...
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.write("<br/>-------------<br/>");
loader = Thread.currentThread().getContextClassLoader();
while(loader != null) {
out.write(loader.getClass().getName()+"<br/>");
loader = loader.getParent();
}
out.write(String.valueOf(loader));
out.write("<br/>-------------<br/>");
out.flush();
out.close();
}
...
|
b934cbb79c1dc620ec23cafd715a0caba6131036
|
src/main/java/org/algorithmprac/sort/AbstractCostAwareSorter.java
|
src/main/java/org/algorithmprac/sort/AbstractCostAwareSorter.java
|
package org.algorithmprac.sort;
import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
public abstract class AbstractCostAwareSorter extends AbstractSorter implements CostAwareSorter {
private final Stopwatch stopwatch = Stopwatch.createUnstarted();
private int cmpCount = 0;
private int swapCount = 0;
@Override
protected void preProcess(Comparable[] a) {
resetStatistics();
stopwatch.start();
}
private void resetStatistics() {
stopwatch.reset();
cmpCount = 0;
swapCount = 0;
}
@Override
protected void postProcess(Comparable[] a) {
stopwatch.stop();
}
@Override
public boolean less(Comparable v, Comparable w) {
++cmpCount;
return super.less(v, w);
}
@Override
public void swap(Comparable[] a, int i, int j) {
++swapCount;
super.swap(a, i, j);
}
public String getReadableCost() {
StringBuilder sb = new StringBuilder();
sb.append("cost is ");
sb.append(stopwatch.toString());
sb.append(", compare count is ");
sb.append(cmpCount);
sb.append(", swap count is ");
sb.append(swapCount);
return sb.toString();
}
public long getCost(TimeUnit timeUnit) {
return stopwatch.elapsed(timeUnit);
}
public int getCmpCount() {
return cmpCount;
}
public int getSwapCount() {
return swapCount;
}
}
|
package org.algorithmprac.sort;
import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
public abstract class AbstractCostAwareSorter extends AbstractSorter implements CostAwareSorter {
private final Stopwatch stopwatch = Stopwatch.createUnstarted();
private long cmpCount = 0;
private long swapCount = 0;
@Override
protected void preProcess(Comparable[] a) {
resetStatistics();
stopwatch.start();
}
private void resetStatistics() {
stopwatch.reset();
cmpCount = 0L;
swapCount = 0L;
}
@Override
protected void postProcess(Comparable[] a) {
stopwatch.stop();
}
@Override
public boolean less(Comparable v, Comparable w) {
++cmpCount;
return super.less(v, w);
}
@Override
public void swap(Comparable[] a, int i, int j) {
++swapCount;
super.swap(a, i, j);
}
public String getReadableCost() {
StringBuilder sb = new StringBuilder();
sb.append("cost is ");
sb.append(stopwatch.toString());
sb.append(", compare count is ");
sb.append(cmpCount);
sb.append(", swap count is ");
sb.append(swapCount);
return sb.toString();
}
public long getCost(TimeUnit timeUnit) {
return stopwatch.elapsed(timeUnit);
}
public long getCmpCount() {
return cmpCount;
}
public long getSwapCount() {
return swapCount;
}
}
|
Change comp and swap count from int to long
|
Change comp and swap count from int to long
|
Java
|
mit
|
xkniu/algorithm-prac
|
java
|
## Code Before:
package org.algorithmprac.sort;
import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
public abstract class AbstractCostAwareSorter extends AbstractSorter implements CostAwareSorter {
private final Stopwatch stopwatch = Stopwatch.createUnstarted();
private int cmpCount = 0;
private int swapCount = 0;
@Override
protected void preProcess(Comparable[] a) {
resetStatistics();
stopwatch.start();
}
private void resetStatistics() {
stopwatch.reset();
cmpCount = 0;
swapCount = 0;
}
@Override
protected void postProcess(Comparable[] a) {
stopwatch.stop();
}
@Override
public boolean less(Comparable v, Comparable w) {
++cmpCount;
return super.less(v, w);
}
@Override
public void swap(Comparable[] a, int i, int j) {
++swapCount;
super.swap(a, i, j);
}
public String getReadableCost() {
StringBuilder sb = new StringBuilder();
sb.append("cost is ");
sb.append(stopwatch.toString());
sb.append(", compare count is ");
sb.append(cmpCount);
sb.append(", swap count is ");
sb.append(swapCount);
return sb.toString();
}
public long getCost(TimeUnit timeUnit) {
return stopwatch.elapsed(timeUnit);
}
public int getCmpCount() {
return cmpCount;
}
public int getSwapCount() {
return swapCount;
}
}
## Instruction:
Change comp and swap count from int to long
## Code After:
package org.algorithmprac.sort;
import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
public abstract class AbstractCostAwareSorter extends AbstractSorter implements CostAwareSorter {
private final Stopwatch stopwatch = Stopwatch.createUnstarted();
private long cmpCount = 0;
private long swapCount = 0;
@Override
protected void preProcess(Comparable[] a) {
resetStatistics();
stopwatch.start();
}
private void resetStatistics() {
stopwatch.reset();
cmpCount = 0L;
swapCount = 0L;
}
@Override
protected void postProcess(Comparable[] a) {
stopwatch.stop();
}
@Override
public boolean less(Comparable v, Comparable w) {
++cmpCount;
return super.less(v, w);
}
@Override
public void swap(Comparable[] a, int i, int j) {
++swapCount;
super.swap(a, i, j);
}
public String getReadableCost() {
StringBuilder sb = new StringBuilder();
sb.append("cost is ");
sb.append(stopwatch.toString());
sb.append(", compare count is ");
sb.append(cmpCount);
sb.append(", swap count is ");
sb.append(swapCount);
return sb.toString();
}
public long getCost(TimeUnit timeUnit) {
return stopwatch.elapsed(timeUnit);
}
public long getCmpCount() {
return cmpCount;
}
public long getSwapCount() {
return swapCount;
}
}
|
...
private final Stopwatch stopwatch = Stopwatch.createUnstarted();
private long cmpCount = 0;
private long swapCount = 0;
@Override
protected void preProcess(Comparable[] a) {
...
private void resetStatistics() {
stopwatch.reset();
cmpCount = 0L;
swapCount = 0L;
}
@Override
...
return stopwatch.elapsed(timeUnit);
}
public long getCmpCount() {
return cmpCount;
}
public long getSwapCount() {
return swapCount;
}
}
...
|
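A rough back-of-the-envelope check of why the comparison and swap counters above are widened to long: a 32-bit signed int tops out at 2^31 - 1, which an n·log2(n) comparison sort can exceed for large inputs. The input size below is a hypothetical order-of-magnitude estimate, not a measured figure:
import math
INT_MAX = 2**31 - 1  # largest value a 32-bit signed counter can hold
n = 200_000_000  # hypothetical number of elements being sorted
approx_comparisons = n * math.log2(n)  # ~n log n comparisons for a comparison sort
print(f"~{approx_comparisons:.2e} comparisons vs INT_MAX {INT_MAX:.2e}")
assert approx_comparisons > INT_MAX  # an int counter would overflow here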
edbbcdda383c8f15c5a3c496490f2a85125d844d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import ofxparse
setup(name='ofxparse',
version=ofxparse.__version__,
description="Tools for working with the OFX (Open Financial Exchange) file format",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='ofx, Open Financial Exchange, file formats',
author='Jerry Seutter',
author_email='[email protected]',
url='http://sites.google.com/site/ofxparse',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"BeautifulSoup>=3.0",
],
entry_points="""
""",
use_2to3 = True,
test_suite = 'tests',
)
|
from setuptools import setup, find_packages
import re
VERSION = re.search(r"__version__ = '(.*?)'",
open("ofxparse/__init__.py").read()).group(1)
setup(name='ofxparse',
version=VERSION,
description="Tools for working with the OFX (Open Financial Exchange) file format",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='ofx, Open Financial Exchange, file formats',
author='Jerry Seutter',
author_email='[email protected]',
url='http://sites.google.com/site/ofxparse',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"BeautifulSoup>=3.0",
],
entry_points="""
""",
use_2to3 = True,
test_suite = 'tests',
)
|
Fix ImportError when installing without BeautifulSoup
|
Fix ImportError when installing without BeautifulSoup
|
Python
|
mit
|
udibr/ofxparse,rdsteed/ofxparse,jseutter/ofxparse,jaraco/ofxparse,hiromu2000/ofxparse,egh/ofxparse
|
python
|
## Code Before:
from setuptools import setup, find_packages
import ofxparse
setup(name='ofxparse',
version=ofxparse.__version__,
description="Tools for working with the OFX (Open Financial Exchange) file format",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='ofx, Open Financial Exchange, file formats',
author='Jerry Seutter',
author_email='[email protected]',
url='http://sites.google.com/site/ofxparse',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"BeautifulSoup>=3.0",
],
entry_points="""
""",
use_2to3 = True,
test_suite = 'tests',
)
## Instruction:
Fix ImportError when installing without BeautifulSoup
## Code After:
from setuptools import setup, find_packages
import re
VERSION = re.search(r"__version__ = '(.*?)'",
open("ofxparse/__init__.py").read()).group(1)
setup(name='ofxparse',
version=VERSION,
description="Tools for working with the OFX (Open Financial Exchange) file format",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
keywords='ofx, Open Financial Exchange, file formats',
author='Jerry Seutter',
author_email='[email protected]',
url='http://sites.google.com/site/ofxparse',
license='MIT License',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=[
# -*- Extra requirements: -*-
"BeautifulSoup>=3.0",
],
entry_points="""
""",
use_2to3 = True,
test_suite = 'tests',
)
|
# ... existing code ...
from setuptools import setup, find_packages
import re
VERSION = re.search(r"__version__ = '(.*?)'",
open("ofxparse/__init__.py").read()).group(1)
setup(name='ofxparse',
version=VERSION,
description="Tools for working with the OFX (Open Financial Exchange) file format",
long_description=open("./README", "r").read(),
# Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
# ... rest of the code ...
|
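The fix above reads the version string out of the package source with a regular expression instead of importing the package, so setup.py no longer needs the package's runtime dependencies installed just to learn the version. A small self-contained sketch of the same idea; the file contents and version number are inlined stand-ins rather than read from disk:
import re
# Inlined stand-in for open("ofxparse/__init__.py").read(); the point is that
# no import of the package (and its third-party dependencies) is needed.
init_source = "__version__ = '0.21'\n"
VERSION = re.search(r"__version__ = '(.*?)'", init_source).group(1)
assert VERSION == '0.21'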
1ef6af234acfa116eb9299ea8a7deaf711784b4f
|
app/src/main/java/com/ustwo/boilerplate/base/BasePresenter.java
|
app/src/main/java/com/ustwo/boilerplate/base/BasePresenter.java
|
package com.ustwo.boilerplate.base;
import android.support.annotation.CallSuper;
import android.support.annotation.NonNull;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
public abstract class BasePresenter<T extends BaseView> {
private CompositeDisposable disposables;
private T view;
/**
* On view attached.
*
* @param view View attached to the presenter
*/
@CallSuper
public void onViewAttached(@NonNull final T view) {
if (this.view != null) {
throw new IllegalStateException("View " + this.view + " is already attached. Cannot attach "
+ view);
}
this.view = view;
}
/**
* On view detached.
*/
@CallSuper
public void onViewDetached() {
if (view == null) {
throw new IllegalStateException("View is already detached");
}
view = null;
if (disposables != null) {
disposables.dispose();
disposables = null;
}
}
/**
* Dispose on view detach.
*
* @param disposable Disposable to be disposed of upon view detachment
*/
@CallSuper
protected void disposeOnViewDetach(@NonNull final Disposable disposable) {
if (disposables == null) {
disposables = new CompositeDisposable();
}
disposables.add(disposable);
}
public boolean isViewAttached() {
return view != null;
}
}
|
package com.ustwo.boilerplate.base;
import android.support.annotation.CallSuper;
import android.support.annotation.NonNull;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
public abstract class BasePresenter<T extends BaseView> {
private CompositeDisposable disposables;
private T view;
/**
* On view attached. To be called when your view is initialised.
*
* @param view View attached to the presenter
*/
@CallSuper
public void onViewAttached(@NonNull final T view) {
if (this.view != null) {
throw new IllegalStateException("View " + this.view + " is already attached. Cannot attach "
+ view);
}
this.view = view;
}
/**
* On view detached. Intended as a cleanup process that should be called when the view will no
* longer be in use.
*/
@CallSuper
public void onViewDetached() {
if (view == null) {
throw new IllegalStateException("View is already detached");
}
view = null;
if (disposables != null) {
disposables.dispose();
disposables = null;
}
}
/**
* Dispose on view detach.
*
* @param disposable Disposable to be disposed of upon view detachment
*/
@CallSuper
protected void disposeOnViewDetach(@NonNull final Disposable disposable) {
if (disposables == null) {
disposables = new CompositeDisposable();
}
disposables.add(disposable);
}
public boolean isViewAttached() {
return view != null;
}
}
|
Add clarification on expected usage of onViewAttached and onViewDetached
|
Add clarification on expected usage of onViewAttached and onViewDetached
|
Java
|
apache-2.0
|
ustwo/android-boilerplate,ustwo/android-boilerplate
|
java
|
## Code Before:
package com.ustwo.boilerplate.base;
import android.support.annotation.CallSuper;
import android.support.annotation.NonNull;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
public abstract class BasePresenter<T extends BaseView> {
private CompositeDisposable disposables;
private T view;
/**
* On view attached.
*
* @param view View attached to the presenter
*/
@CallSuper
public void onViewAttached(@NonNull final T view) {
if (this.view != null) {
throw new IllegalStateException("View " + this.view + " is already attached. Cannot attach "
+ view);
}
this.view = view;
}
/**
* On view detached.
*/
@CallSuper
public void onViewDetached() {
if (view == null) {
throw new IllegalStateException("View is already detached");
}
view = null;
if (disposables != null) {
disposables.dispose();
disposables = null;
}
}
/**
* Dispose on view detach.
*
* @param disposable Disposable to be disposed of upon view detachment
*/
@CallSuper
protected void disposeOnViewDetach(@NonNull final Disposable disposable) {
if (disposables == null) {
disposables = new CompositeDisposable();
}
disposables.add(disposable);
}
public boolean isViewAttached() {
return view != null;
}
}
## Instruction:
Add clarification on expected usage of onViewAttached and onViewDetached
## Code After:
package com.ustwo.boilerplate.base;
import android.support.annotation.CallSuper;
import android.support.annotation.NonNull;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
public abstract class BasePresenter<T extends BaseView> {
private CompositeDisposable disposables;
private T view;
/**
* On view attached. To be called when your view is initialised.
*
* @param view View attached to the presenter
*/
@CallSuper
public void onViewAttached(@NonNull final T view) {
if (this.view != null) {
throw new IllegalStateException("View " + this.view + " is already attached. Cannot attach "
+ view);
}
this.view = view;
}
/**
* On view detached. Intended as a cleanup process that should be called when the view will no
* longer be in use.
*/
@CallSuper
public void onViewDetached() {
if (view == null) {
throw new IllegalStateException("View is already detached");
}
view = null;
if (disposables != null) {
disposables.dispose();
disposables = null;
}
}
/**
* Dispose on view detach.
*
* @param disposable Disposable to be disposed of upon view detachment
*/
@CallSuper
protected void disposeOnViewDetach(@NonNull final Disposable disposable) {
if (disposables == null) {
disposables = new CompositeDisposable();
}
disposables.add(disposable);
}
public boolean isViewAttached() {
return view != null;
}
}
|
...
private T view;
/**
* On view attached. To be called when your view is initialised.
*
* @param view View attached to the presenter
*/
...
}
/**
* On view detached. Intended as a cleanup process that should be called when the view will no
* longer be in use.
*/
@CallSuper
public void onViewDetached() {
...
|
f516749bc41dbebeb5b0ae07078af78f510a592e
|
lib/markdown_deux/__init__.py
|
lib/markdown_deux/__init__.py
|
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.
|
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.
|
Python
|
mit
|
douzepouze/django-markdown-tag,trentm/django-markdown-deux,gogobook/django-markdown-deux,gogobook/django-markdown-deux
|
python
|
## Code Before:
__version_info__ = (1, 0, 2)
__version__ = '1.0.1'
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
print "XXX markdown_deux.markdown(style=%r) -> %r" % (style, get_style(style))
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
## Instruction:
Fix having ver info written twice (divergence). Makes "mk cut_a_release" ver update work.
## Code After:
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
def markdown(text, style="default"):
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
from markdown_deux.conf import settings
try:
return settings.MARKDOWN_DEUX_STYLES[style]
except KeyError:
return settings.MARKDOWN_DEUX_STYLES.get("default",
settings.MARKDOWN_DEUX_DEFAULT_STYLE)
|
...
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "Trent Mick"
...
if not text:
return ""
import markdown2
return markdown2.markdown(text, **get_style(style))
def get_style(style):
...
|
579f5ff692d4fbd9cd54ce86471afe75657f4475
|
src/main/java/com/thecoffeine/auth/controllers/SecurityController.java
|
src/main/java/com/thecoffeine/auth/controllers/SecurityController.java
|
package com.thecoffeine.auth.controllers;
import com.thecoffeine.auth.view.form.RegistrationForm;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.servlet.http.HttpServletRequest;
/**
* Security Controller.
*
* @version 1.0
*/
@Controller
public class SecurityController {
/**
* Display LogIn form.
*
* @param request HTTP request.
* @param model View's model.
*
* @return View name.
*/
@RequestMapping( value = "/login" )
public String login( HttpServletRequest request, Model model ) {
//- Set params for view -//
model.addAttribute( "_csrf", request.getAttribute( "_csrf" ) );
model.addAttribute( "error", request.getParameterMap().containsKey( "error" ) );
return "login";
}
/**
* Handle Sign Up.
*
* @param request HTTP request.
*
* @return View name.
*/
@RequestMapping(method = RequestMethod.POST)
public String signUp(
@RequestBody
RegistrationForm form,
HttpServletRequest request
) {
return "sign-up.success";
}
}
|
package com.thecoffeine.auth.controllers;
import com.thecoffeine.auth.view.form.RegistrationForm;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.servlet.http.HttpServletRequest;
/**
* Security Controller.
*
* @version 1.0
*/
@Controller
public class SecurityController {
/**
* Display LogIn form.
*
* @param request HTTP request.
* @param model View's model.
*
* @return View name.
*/
@RequestMapping( value = "/login" )
public String login( HttpServletRequest request, Model model ) {
//- Set params for view -//
model.addAttribute( "_csrf", request.getAttribute( "_csrf" ) );
model.addAttribute( "error", request.getParameterMap().containsKey( "error" ) );
return "login";
}
/**
* Handle Sign Up.
*
* @param request HTTP request.
*
* @return View name.
*/
@RequestMapping(value = "/signup", method = RequestMethod.POST)
public String signUp(
@RequestBody
RegistrationForm form,
HttpServletRequest request
) {
return "sign-up.success";
}
}
|
Add resource name for sign up.
|
Add resource name for sign up.
|
Java
|
mit
|
coffeine-009/auth,coffeine-009/auth
|
java
|
## Code Before:
package com.thecoffeine.auth.controllers;
import com.thecoffeine.auth.view.form.RegistrationForm;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.servlet.http.HttpServletRequest;
/**
* Security Controller.
*
* @version 1.0
*/
@Controller
public class SecurityController {
/**
* Display LogIn form.
*
* @param request HTTP request.
* @param model View's model.
*
* @return View name.
*/
@RequestMapping( value = "/login" )
public String login( HttpServletRequest request, Model model ) {
//- Set params for view -//
model.addAttribute( "_csrf", request.getAttribute( "_csrf" ) );
model.addAttribute( "error", request.getParameterMap().containsKey( "error" ) );
return "login";
}
/**
* Handle Sign Up.
*
* @param request HTTP request.
*
* @return View name.
*/
@RequestMapping(method = RequestMethod.POST)
public String signUp(
@RequestBody
RegistrationForm form,
HttpServletRequest request
) {
return "sign-up.success";
}
}
## Instruction:
Add resource name for sign up.
## Code After:
package com.thecoffeine.auth.controllers;
import com.thecoffeine.auth.view.form.RegistrationForm;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.servlet.http.HttpServletRequest;
/**
* Security Controller.
*
* @version 1.0
*/
@Controller
public class SecurityController {
/**
* Display LogIn form.
*
* @param request HTTP request.
* @param model View's model.
*
* @return View name.
*/
@RequestMapping( value = "/login" )
public String login( HttpServletRequest request, Model model ) {
//- Set params for view -//
model.addAttribute( "_csrf", request.getAttribute( "_csrf" ) );
model.addAttribute( "error", request.getParameterMap().containsKey( "error" ) );
return "login";
}
/**
* Handle Sign Up.
*
* @param request HTTP request.
*
* @return View name.
*/
@RequestMapping(value = "/signup", method = RequestMethod.POST)
public String signUp(
@RequestBody
RegistrationForm form,
HttpServletRequest request
) {
return "sign-up.success";
}
}
|
// ... existing code ...
*
* @return View name.
*/
@RequestMapping(value = "/signup", method = RequestMethod.POST)
public String signUp(
@RequestBody
RegistrationForm form,
// ... rest of the code ...
|
cb48b22ce6d9837784fd33e9cb6c5a84374a1efc
|
src/year2014/DigitalRoot.java
|
src/year2014/DigitalRoot.java
|
package year2014;
import java.util.Scanner;
import java.util.stream.IntStream;
/**
* <h3>Digital Root</h3>
* <p>The digital root of a number is obtained by summing its individual digits,
* summing those individual digits, and continuing the process until the final sum
* is a single digit (that is, less than 10). For example, the digital root of 1234 is 1</p>
*
* @author John Cameron
* @see <a href="https://www.seminolestate.edu/computers/competition/samples/2014">Problem Descrption and Requirements</a>
*/
public class DigitalRoot {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println("Enter a non-negative integer number: ");
final int input = Math.abs(scanner.nextInt());
scanner.close();
int digitalRoot = input;
while (digitalRoot > 9)
digitalRoot = sumIndividualDigits(digitalRoot);
System.out.println("The digital root of " + input + " is " + digitalRoot);
}
private static int sumIndividualDigits(int input) {
if (input < 0)
throw new IllegalArgumentException("Input must be positive");
char[] chars = String.valueOf(input).toCharArray();
return IntStream.range(0, chars.length)
.map(i -> Character.getNumericValue(chars[i])).sum();
}
}
|
package year2014;
import java.util.Scanner;
/**
* <h3>Digital Root</h3>
* <p>The digital root of a number is obtained by summing its individual digits,
* summing those individual digits, and continuing the process until the final sum
* is a single digit (that is, less than 10). For example, the digital root of 1234 is 1</p>
*
* @author John Cameron
* @see <a href="https://www.seminolestate.edu/computers/competition/samples/2014">Problem Descrption and Requirements</a>
*/
public class DigitalRoot {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println("Enter a non-negative integer number: ");
final int input = Math.abs(scanner.nextInt());
scanner.close();
int digitalRoot = input;
while (digitalRoot > 9)
digitalRoot = sumIndividualDigits(digitalRoot);
System.out.println("The digital root of " + input + " is " + digitalRoot);
}
private static int sumIndividualDigits(int input) {
if (input < 0)
throw new IllegalArgumentException("Input must be positive");
int sum = 0;
while (input > 0) {
sum += input % 10;
input /= 10;
}
return sum;
//String approach method
/*char[] chars = String.valueOf(input).toCharArray();
return IntStream.range(0, chars.length)
.map(i -> Character.getNumericValue(chars[i])).sum();*/
}
}
|
Use Integer approach in summing digits
|
Use Integer approach in summing digits
|
Java
|
mit
|
jcameron2/Seminole-Intercollegiate-Programming-Competition
|
java
|
## Code Before:
package year2014;
import java.util.Scanner;
import java.util.stream.IntStream;
/**
* <h3>Digital Root</h3>
* <p>The digital root of a number is obtained by summing its individual digits,
* summing those individual digits, and continuing the process until the final sum
* is a single digit (that is, less than 10). For example, the digital root of 1234 is 1</p>
*
* @author John Cameron
* @see <a href="https://www.seminolestate.edu/computers/competition/samples/2014">Problem Descrption and Requirements</a>
*/
public class DigitalRoot {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println("Enter a non-negative integer number: ");
final int input = Math.abs(scanner.nextInt());
scanner.close();
int digitalRoot = input;
while (digitalRoot > 9)
digitalRoot = sumIndividualDigits(digitalRoot);
System.out.println("The digital root of " + input + " is " + digitalRoot);
}
private static int sumIndividualDigits(int input) {
if (input < 0)
throw new IllegalArgumentException("Input must be positive");
char[] chars = String.valueOf(input).toCharArray();
return IntStream.range(0, chars.length)
.map(i -> Character.getNumericValue(chars[i])).sum();
}
}
## Instruction:
Use Integer approach in summing digits
## Code After:
package year2014;
import java.util.Scanner;
/**
* <h3>Digital Root</h3>
* <p>The digital root of a number is obtained by summing its individual digits,
* summing those individual digits, and continuing the process until the final sum
* is a single digit (that is, less than 10). For example, the digital root of 1234 is 1</p>
*
* @author John Cameron
* @see <a href="https://www.seminolestate.edu/computers/competition/samples/2014">Problem Descrption and Requirements</a>
*/
public class DigitalRoot {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println("Enter a non-negative integer number: ");
final int input = Math.abs(scanner.nextInt());
scanner.close();
int digitalRoot = input;
while (digitalRoot > 9)
digitalRoot = sumIndividualDigits(digitalRoot);
System.out.println("The digital root of " + input + " is " + digitalRoot);
}
private static int sumIndividualDigits(int input) {
if (input < 0)
throw new IllegalArgumentException("Input must be positive");
int sum = 0;
while (input > 0) {
sum += input % 10;
input /= 10;
}
return sum;
//String approach method
/*char[] chars = String.valueOf(input).toCharArray();
return IntStream.range(0, chars.length)
.map(i -> Character.getNumericValue(chars[i])).sum();*/
}
}
|
# ... existing code ...
package year2014;
import java.util.Scanner;
/**
* <h3>Digital Root</h3>
# ... modified code ...
if (input < 0)
throw new IllegalArgumentException("Input must be positive");
int sum = 0;
while (input > 0) {
sum += input % 10;
input /= 10;
}
return sum;
//String approach method
/*char[] chars = String.valueOf(input).toCharArray();
return IntStream.range(0, chars.length)
.map(i -> Character.getNumericValue(chars[i])).sum();*/
}
}
# ... rest of the code ...
|
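The record above swaps string and char handling for plain integer arithmetic when summing digits. An equivalent sketch in Python, with the digital-root loop included so the example is self-contained:
def sum_individual_digits(n):
    # Peel off the last decimal digit with % 10, then drop it with // 10.
    total = 0
    while n > 0:
        total += n % 10
        n //= 10
    return total
def digital_root(n):
    while n > 9:
        n = sum_individual_digits(n)
    return n
assert sum_individual_digits(1234) == 10
assert digital_root(1234) == 1  # matches the example in the record's javadoc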
cd615fc71ae421565fc50a1ee129867c7beb55c7
|
src/main/java/com/demonwav/mcdev/update/Channels.java
|
src/main/java/com/demonwav/mcdev/update/Channels.java
|
/*
* Minecraft Dev for IntelliJ
*
* https://minecraftdev.org
*
* Copyright (c) 2016 minecraft-dev
*
* MIT License
*/
package com.demonwav.mcdev.update;
import com.google.common.collect.ImmutableList;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import org.jetbrains.annotations.Nullable;
import java.util.List;
public enum Channels {
SCOTLIN("Kotlin And Scala Support", "https://plugins.jetbrains.com/plugins/kotlin%20and%20scala%20support/8327", 1);
private final String title;
private final String url;
private final int index;
Channels(final String title, final String url, final int index) {
this.title = title;
this.url = url;
this.index = index;
}
public String getTitle() {
return title;
}
public String getUrl() {
return url;
}
public int getIndex() {
return index;
}
@Nullable
public static Channels getChannel(int index) {
for (Channels channels : values()) {
if (channels.getIndex() == index) {
return channels;
}
}
return null;
}
public static List<Channels> orderedList() {
return ImmutableList.of(SCOTLIN);
}
public boolean hasChannel() {
return UpdateSettings.getInstance().getPluginHosts().contains(url);
}
}
|
/*
* Minecraft Dev for IntelliJ
*
* https://minecraftdev.org
*
* Copyright (c) 2016 minecraft-dev
*
* MIT License
*/
package com.demonwav.mcdev.update;
import com.google.common.collect.ImmutableList;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import org.jetbrains.annotations.Nullable;
import java.util.List;
public enum Channels {
;
private final String title;
private final String url;
private final int index;
Channels(final String title, final String url, final int index) {
this.title = title;
this.url = url;
this.index = index;
}
public String getTitle() {
return title;
}
public String getUrl() {
return url;
}
public int getIndex() {
return index;
}
@Nullable
public static Channels getChannel(int index) {
for (Channels channels : values()) {
if (channels.getIndex() == index) {
return channels;
}
}
return null;
}
public static List<Channels> orderedList() {
return ImmutableList.of();
}
public boolean hasChannel() {
return UpdateSettings.getInstance().getPluginHosts().contains(url);
}
}
|
Remove Kotlin and Scala support channel, as it's diverged too far
|
Remove Kotlin and Scala support channel, as it's diverged too far
|
Java
|
mit
|
DemonWav/MinecraftDevIntelliJ,minecraft-dev/MinecraftDev,DemonWav/MinecraftDev,DemonWav/MinecraftDevIntelliJ,minecraft-dev/MinecraftDev,DemonWav/MinecraftDev,minecraft-dev/MinecraftDev,DemonWav/MinecraftDevIntelliJ,DemonWav/IntelliJBukkitSupport,DemonWav/MinecraftDev
|
java
|
## Code Before:
/*
* Minecraft Dev for IntelliJ
*
* https://minecraftdev.org
*
* Copyright (c) 2016 minecraft-dev
*
* MIT License
*/
package com.demonwav.mcdev.update;
import com.google.common.collect.ImmutableList;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import org.jetbrains.annotations.Nullable;
import java.util.List;
public enum Channels {
SCOTLIN("Kotlin And Scala Support", "https://plugins.jetbrains.com/plugins/kotlin%20and%20scala%20support/8327", 1);
private final String title;
private final String url;
private final int index;
Channels(final String title, final String url, final int index) {
this.title = title;
this.url = url;
this.index = index;
}
public String getTitle() {
return title;
}
public String getUrl() {
return url;
}
public int getIndex() {
return index;
}
@Nullable
public static Channels getChannel(int index) {
for (Channels channels : values()) {
if (channels.getIndex() == index) {
return channels;
}
}
return null;
}
public static List<Channels> orderedList() {
return ImmutableList.of(SCOTLIN);
}
public boolean hasChannel() {
return UpdateSettings.getInstance().getPluginHosts().contains(url);
}
}
## Instruction:
Remove Kotlin and Scala support channel, as it's diverged too far
## Code After:
/*
* Minecraft Dev for IntelliJ
*
* https://minecraftdev.org
*
* Copyright (c) 2016 minecraft-dev
*
* MIT License
*/
package com.demonwav.mcdev.update;
import com.google.common.collect.ImmutableList;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import org.jetbrains.annotations.Nullable;
import java.util.List;
public enum Channels {
;
private final String title;
private final String url;
private final int index;
Channels(final String title, final String url, final int index) {
this.title = title;
this.url = url;
this.index = index;
}
public String getTitle() {
return title;
}
public String getUrl() {
return url;
}
public int getIndex() {
return index;
}
@Nullable
public static Channels getChannel(int index) {
for (Channels channels : values()) {
if (channels.getIndex() == index) {
return channels;
}
}
return null;
}
public static List<Channels> orderedList() {
return ImmutableList.of();
}
public boolean hasChannel() {
return UpdateSettings.getInstance().getPluginHosts().contains(url);
}
}
|
// ... existing code ...
import java.util.List;
public enum Channels {
;
private final String title;
private final String url;
// ... modified code ...
}
public static List<Channels> orderedList() {
return ImmutableList.of();
}
public boolean hasChannel() {
// ... rest of the code ...
|
c870f68c77652a11f8401bbbb981797694174288
|
src/py/crankshaft/setup.py
|
src/py/crankshaft/setup.py
|
from setuptools import setup, find_packages
setup(
name='crankshaft',
version='0.0.0',
description='CartoDB Spatial Analysis Python Library',
url='https://github.com/CartoDB/crankshaft',
author='Data Services Team - CartoDB',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Mapping comunity',
'Topic :: Maps :: Mapping Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='maps mapping tools spatial analysis geostatistics',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'dev': ['unittest'],
'test': ['unittest', 'nose', 'mock'],
},
# The choice of component versions is dictated by what's
# provisioned in the production servers.
install_requires=['scipy==0.17.1', 'pysal==1.9.1', 'scikit-learn==0.17.1'],
requires=['scipy', 'pysal', 'numpy', 'sklearn'],
test_suite='test'
)
|
from setuptools import setup, find_packages
setup(
name='crankshaft',
version='0.0.0',
description='CartoDB Spatial Analysis Python Library',
url='https://github.com/CartoDB/crankshaft',
author='Data Services Team - CartoDB',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Mapping comunity',
'Topic :: Maps :: Mapping Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='maps mapping tools spatial analysis geostatistics',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'dev': ['unittest'],
'test': ['unittest', 'nose', 'mock'],
},
# The choice of component versions is dictated by what's
# provisioned in the production servers.
install_requires=['pysal==1.9.1', 'scikit-learn==0.17.1'],
requires=['pysal', 'numpy', 'sklearn' ],
test_suite='test'
)
|
Revert "Declare scipy as dep"
|
Revert "Declare scipy as dep"
This reverts commit 1e8bc12e0a6ea2ffefe580b63133b88f4db045a7.
|
Python
|
bsd-3-clause
|
CartoDB/crankshaft,CartoDB/crankshaft
|
python
|
## Code Before:
from setuptools import setup, find_packages
setup(
name='crankshaft',
version='0.0.0',
description='CartoDB Spatial Analysis Python Library',
url='https://github.com/CartoDB/crankshaft',
author='Data Services Team - CartoDB',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Mapping comunity',
'Topic :: Maps :: Mapping Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='maps mapping tools spatial analysis geostatistics',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'dev': ['unittest'],
'test': ['unittest', 'nose', 'mock'],
},
# The choice of component versions is dictated by what's
# provisioned in the production servers.
install_requires=['scipy==0.17.1', 'pysal==1.9.1', 'scikit-learn==0.17.1'],
requires=['scipy', 'pysal', 'numpy', 'sklearn'],
test_suite='test'
)
## Instruction:
Revert "Declare scipy as dep"
This reverts commit 1e8bc12e0a6ea2ffefe580b63133b88f4db045a7.
## Code After:
from setuptools import setup, find_packages
setup(
name='crankshaft',
version='0.0.0',
description='CartoDB Spatial Analysis Python Library',
url='https://github.com/CartoDB/crankshaft',
author='Data Services Team - CartoDB',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Mapping comunity',
'Topic :: Maps :: Mapping Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='maps mapping tools spatial analysis geostatistics',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'dev': ['unittest'],
'test': ['unittest', 'nose', 'mock'],
},
# The choice of component versions is dictated by what's
# provisioned in the production servers.
install_requires=['pysal==1.9.1', 'scikit-learn==0.17.1'],
requires=['pysal', 'numpy', 'sklearn' ],
test_suite='test'
)
|
...
# The choice of component versions is dictated by what's
# provisioned in the production servers.
install_requires=['pysal==1.9.1', 'scikit-learn==0.17.1'],
requires=['pysal', 'numpy', 'sklearn' ],
test_suite='test'
)
...
|
60f398bb867e6cc382e4db9af69e670278ecbcfd
|
src/main/java/codechicken/lib/recipe/ConditionalIngredientFactory.java
|
src/main/java/codechicken/lib/recipe/ConditionalIngredientFactory.java
|
package codechicken.lib.recipe;
import com.google.gson.JsonObject;
import net.minecraft.item.crafting.Ingredient;
import net.minecraft.util.JsonUtils;
import net.minecraftforge.common.crafting.CraftingHelper;
import net.minecraftforge.common.crafting.IIngredientFactory;
import net.minecraftforge.common.crafting.JsonContext;
import javax.annotation.Nonnull;
/**
* Simple conditional ingredient.
*
* Created by covers1624 on 18/10/2017.
*/
public class ConditionalIngredientFactory implements IIngredientFactory {
@Nonnull
@Override
public Ingredient parse(JsonContext context, JsonObject json) {
if (!CraftingHelper.processConditions(JsonUtils.getJsonArray(json, "conditions"), context)) {
if (json.has("fail")) {
CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "failed_condition"), context);
}
return Ingredient.EMPTY;
}
return CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "pass"), context);
}
}
|
package codechicken.lib.recipe;
import com.google.gson.JsonObject;
import net.minecraft.item.crafting.Ingredient;
import net.minecraft.util.JsonUtils;
import net.minecraftforge.common.crafting.CraftingHelper;
import net.minecraftforge.common.crafting.IIngredientFactory;
import net.minecraftforge.common.crafting.JsonContext;
import javax.annotation.Nonnull;
/**
* Simple conditional ingredient.
*
* Created by covers1624 on 18/10/2017.
*/
public class ConditionalIngredientFactory implements IIngredientFactory {
@Nonnull
@Override
public Ingredient parse(JsonContext context, JsonObject json) {
if (!CraftingHelper.processConditions(JsonUtils.getJsonArray(json, "conditions"), context)) {
if (json.has("fail")) {
CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "fail"), context);
}
return Ingredient.EMPTY;
}
return CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "pass"), context);
}
}
|
Fix ConditionalIngredientFactory parsing of fail ingredient.
|
Fix ConditionalIngredientFactory parsing of fail ingredient.
|
Java
|
lgpl-2.1
|
TheCBProject/CodeChickenLib
|
java
|
## Code Before:
package codechicken.lib.recipe;
import com.google.gson.JsonObject;
import net.minecraft.item.crafting.Ingredient;
import net.minecraft.util.JsonUtils;
import net.minecraftforge.common.crafting.CraftingHelper;
import net.minecraftforge.common.crafting.IIngredientFactory;
import net.minecraftforge.common.crafting.JsonContext;
import javax.annotation.Nonnull;
/**
* Simple conditional ingredient.
*
* Created by covers1624 on 18/10/2017.
*/
public class ConditionalIngredientFactory implements IIngredientFactory {
@Nonnull
@Override
public Ingredient parse(JsonContext context, JsonObject json) {
if (!CraftingHelper.processConditions(JsonUtils.getJsonArray(json, "conditions"), context)) {
if (json.has("fail")) {
CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "failed_condition"), context);
}
return Ingredient.EMPTY;
}
return CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "pass"), context);
}
}
## Instruction:
Fix ConditionalIngredientFactory parsing of fail ingredient.
## Code After:
package codechicken.lib.recipe;
import com.google.gson.JsonObject;
import net.minecraft.item.crafting.Ingredient;
import net.minecraft.util.JsonUtils;
import net.minecraftforge.common.crafting.CraftingHelper;
import net.minecraftforge.common.crafting.IIngredientFactory;
import net.minecraftforge.common.crafting.JsonContext;
import javax.annotation.Nonnull;
/**
* Simple conditional ingredient.
*
* Created by covers1624 on 18/10/2017.
*/
public class ConditionalIngredientFactory implements IIngredientFactory {
@Nonnull
@Override
public Ingredient parse(JsonContext context, JsonObject json) {
if (!CraftingHelper.processConditions(JsonUtils.getJsonArray(json, "conditions"), context)) {
if (json.has("fail")) {
CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "fail"), context);
}
return Ingredient.EMPTY;
}
return CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "pass"), context);
}
}
|
// ... existing code ...
public Ingredient parse(JsonContext context, JsonObject json) {
if (!CraftingHelper.processConditions(JsonUtils.getJsonArray(json, "conditions"), context)) {
if (json.has("fail")) {
CraftingHelper.getIngredient(JsonUtils.getJsonObject(json, "fail"), context);
}
return Ingredient.EMPTY;
}
// ... rest of the code ...
|
841235452d92ea4e40853c8df51568e01b39dba8
|
stackoverflow/21180496/except.py
|
stackoverflow/21180496/except.py
|
import unittest
class MyException(Exception):
def __init__(self, message):
self.message = message
def RaiseException(message):
raise MyException(message)
class ExceptionTest(unittest.TestCase):
def verifyComplexException(self, exception_class, message, callable, *args):
with self.assertRaises(exception_class) as cm:
callable(*args)
exception = cm.exception
self.assertEqual(exception.message, message)
def testRaises(self):
self.verifyComplexException(MyException, 'asdf', RaiseException, 'asdf')
if __name__ == '__main__':
unittest.main()
|
"""Demonstration of catching an exception and verifying its fields."""
import unittest
class MyException(Exception):
def __init__(self, message):
self.message = message
def RaiseException(message):
raise MyException(message)
class ExceptionTest(unittest.TestCase):
def verifyComplexException(self, exception_class, message, callable, *args):
with self.assertRaises(exception_class) as cm:
callable(*args)
exception = cm.exception
self.assertEqual(exception.message, message)
def testRaises(self):
self.verifyComplexException(MyException, 'asdf', RaiseException, 'asdf')
if __name__ == '__main__':
unittest.main()
|
Convert top-level-comment to a docstring.
|
Convert top-level-comment to a docstring.
|
Python
|
apache-2.0
|
mbrukman/stackexchange-answers,mbrukman/stackexchange-answers
|
python
|
## Code Before:
import unittest
class MyException(Exception):
def __init__(self, message):
self.message = message
def RaiseException(message):
raise MyException(message)
class ExceptionTest(unittest.TestCase):
def verifyComplexException(self, exception_class, message, callable, *args):
with self.assertRaises(exception_class) as cm:
callable(*args)
exception = cm.exception
self.assertEqual(exception.message, message)
def testRaises(self):
self.verifyComplexException(MyException, 'asdf', RaiseException, 'asdf')
if __name__ == '__main__':
unittest.main()
## Instruction:
Convert top-level-comment to a docstring.
## Code After:
"""Demonstration of catching an exception and verifying its fields."""
import unittest
class MyException(Exception):
def __init__(self, message):
self.message = message
def RaiseException(message):
raise MyException(message)
class ExceptionTest(unittest.TestCase):
def verifyComplexException(self, exception_class, message, callable, *args):
with self.assertRaises(exception_class) as cm:
callable(*args)
exception = cm.exception
self.assertEqual(exception.message, message)
def testRaises(self):
self.verifyComplexException(MyException, 'asdf', RaiseException, 'asdf')
if __name__ == '__main__':
unittest.main()
|
// ... existing code ...
"""Demonstration of catching an exception and verifying its fields."""
import unittest
// ... rest of the code ...
|
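For comparison only, and not part of the record above: the same exception-field check written with pytest's context manager, which exposes the raised exception through excinfo.value:
import pytest
class MyException(Exception):
    def __init__(self, message):
        self.message = message
def raise_exception(message):
    raise MyException(message)
def test_raises_with_pytest():
    # pytest.raises returns an ExceptionInfo; .value is the exception instance.
    with pytest.raises(MyException) as excinfo:
        raise_exception('asdf')
    assert excinfo.value.message == 'asdf'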
83badd5cf2c7acd266977b8698d548d0de02c5c6
|
modules/electromagnetics/include/utils/ElkEnums.h
|
modules/electromagnetics/include/utils/ElkEnums.h
|
/** ElkEnums contains various enumerations useful in ELK, such as real/imag component definitions in
* Kernels, BCs, etc.
*/
namespace elk
{
enum ComponentEnum
{
REAL,
IMAGINARY
};
} // namespace elk
#endif // ELKENUMS_H
|
/** ElkEnums contains various enumerations useful in ELK, such as real/imag component definitions in
* Kernels, BCs, etc.
*/
namespace elk
{
enum ComponentEnum
{
REAL,
IMAGINARY
};
} // namespace elk
|
Convert utils to pragma once
|
Convert utils to pragma once
refs #21085
|
C
|
lgpl-2.1
|
idaholab/moose,milljm/moose,andrsd/moose,harterj/moose,lindsayad/moose,milljm/moose,harterj/moose,laagesen/moose,laagesen/moose,idaholab/moose,idaholab/moose,lindsayad/moose,andrsd/moose,dschwen/moose,laagesen/moose,laagesen/moose,idaholab/moose,lindsayad/moose,andrsd/moose,dschwen/moose,harterj/moose,dschwen/moose,milljm/moose,sapitts/moose,dschwen/moose,andrsd/moose,sapitts/moose,laagesen/moose,sapitts/moose,lindsayad/moose,milljm/moose,andrsd/moose,dschwen/moose,milljm/moose,harterj/moose,idaholab/moose,sapitts/moose,harterj/moose,lindsayad/moose,sapitts/moose
|
c
|
## Code Before:
/** ElkEnums contains various enumerations useful in ELK, such as real/imag component definitions in
* Kernels, BCs, etc.
*/
namespace elk
{
enum ComponentEnum
{
REAL,
IMAGINARY
};
} // namespace elk
#endif // ELKENUMS_H
## Instruction:
Convert utils to pragma once
refs #21085
## Code After:
/** ElkEnums contains various enumerations useful in ELK, such as real/imag component definitions in
* Kernels, BCs, etc.
*/
namespace elk
{
enum ComponentEnum
{
REAL,
IMAGINARY
};
} // namespace elk
|
...
IMAGINARY
};
} // namespace elk
...
|
50b6778ae43b8945b2073630e351ab759b007a3e
|
tests/social/youtube/test_tasks.py
|
tests/social/youtube/test_tasks.py
|
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
Switch to Jen's channel to (hopefully) make these tests faster.
|
Switch to Jen's channel to (hopefully) make these tests faster.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
python
|
## Code Before:
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
## Instruction:
Switch to Jen's channel to (hopefully) make these tests faster.
## Code After:
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
// ... existing code ...
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
// ... modified code ...
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
// ... rest of the code ...
|
f91da257161eba4039ee8946ead46dd4364406f1
|
uk.ac.bolton.archimate.canvas/src/uk/ac/bolton/archimate/canvas/templates/dialog/CanvasTemplateManagerDialog.java
|
uk.ac.bolton.archimate.canvas/src/uk/ac/bolton/archimate/canvas/templates/dialog/CanvasTemplateManagerDialog.java
|
/*******************************************************************************
* Copyright (c) 2010 Bolton University, UK.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*******************************************************************************/
package uk.ac.bolton.archimate.canvas.templates.dialog;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
import uk.ac.bolton.archimate.templates.dialog.TemplateManagerDialog;
import uk.ac.bolton.archimate.templates.model.TemplateManager;
/**
* Canvas Template Manager Dialog
*
* @author Phillip Beauvoir
*/
public class CanvasTemplateManagerDialog extends TemplateManagerDialog {
public CanvasTemplateManagerDialog(Shell parentShell, TemplateManager templateManager) {
super(parentShell, templateManager);
}
@Override
protected void configureShell(Shell shell) {
super.configureShell(shell);
shell.setText("My Canvasses");
}
@Override
protected Control createDialogArea(Composite parent) {
Control control = super.createDialogArea(parent);
setTitle("Manage Canvas Templates");
setMessage("Drag and drop Templates into Categories.");
return control;
}
}
|
/*******************************************************************************
* Copyright (c) 2010 Bolton University, UK.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*******************************************************************************/
package uk.ac.bolton.archimate.canvas.templates.dialog;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
import uk.ac.bolton.archimate.templates.dialog.TemplateManagerDialog;
import uk.ac.bolton.archimate.templates.model.TemplateManager;
/**
* Canvas Template Manager Dialog
*
* @author Phillip Beauvoir
*/
public class CanvasTemplateManagerDialog extends TemplateManagerDialog {
private static String HELP_ID = "uk.ac.bolton.archimate.help.CanvasTemplateManagerDialog"; //$NON-NLS-1$
public CanvasTemplateManagerDialog(Shell parentShell, TemplateManager templateManager) {
super(parentShell, templateManager);
}
@Override
protected void configureShell(Shell shell) {
super.configureShell(shell);
shell.setText("My Canvasses");
}
@Override
protected Control createDialogArea(Composite parent) {
Control control = super.createDialogArea(parent);
setTitle("Manage Canvas Templates");
setMessage("Drag and drop Templates into Categories.");
return control;
}
@Override
protected String getHelpID() {
return HELP_ID;
}
}
|
Use correct HELP_ID for Canvas Template Manager Dialog
|
Use correct HELP_ID for Canvas Template Manager Dialog
* HELP_ID was incorrect
|
Java
|
mit
|
archimatetool/archi,archimatetool/archi,archimatetool/archi
|
java
|
## Code Before:
/*******************************************************************************
* Copyright (c) 2010 Bolton University, UK.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*******************************************************************************/
package uk.ac.bolton.archimate.canvas.templates.dialog;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
import uk.ac.bolton.archimate.templates.dialog.TemplateManagerDialog;
import uk.ac.bolton.archimate.templates.model.TemplateManager;
/**
* Canvas Template Manager Dialog
*
* @author Phillip Beauvoir
*/
public class CanvasTemplateManagerDialog extends TemplateManagerDialog {
public CanvasTemplateManagerDialog(Shell parentShell, TemplateManager templateManager) {
super(parentShell, templateManager);
}
@Override
protected void configureShell(Shell shell) {
super.configureShell(shell);
shell.setText("My Canvasses");
}
@Override
protected Control createDialogArea(Composite parent) {
Control control = super.createDialogArea(parent);
setTitle("Manage Canvas Templates");
setMessage("Drag and drop Templates into Categories.");
return control;
}
}
## Instruction:
Use correct HELP_ID for Canvas Template Manager Dialog
* HELP_ID was incorrect
## Code After:
/*******************************************************************************
* Copyright (c) 2010 Bolton University, UK.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*******************************************************************************/
package uk.ac.bolton.archimate.canvas.templates.dialog;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Shell;
import uk.ac.bolton.archimate.templates.dialog.TemplateManagerDialog;
import uk.ac.bolton.archimate.templates.model.TemplateManager;
/**
* Canvas Template Manager Dialog
*
* @author Phillip Beauvoir
*/
public class CanvasTemplateManagerDialog extends TemplateManagerDialog {
private static String HELP_ID = "uk.ac.bolton.archimate.help.CanvasTemplateManagerDialog"; //$NON-NLS-1$
public CanvasTemplateManagerDialog(Shell parentShell, TemplateManager templateManager) {
super(parentShell, templateManager);
}
@Override
protected void configureShell(Shell shell) {
super.configureShell(shell);
shell.setText("My Canvasses");
}
@Override
protected Control createDialogArea(Composite parent) {
Control control = super.createDialogArea(parent);
setTitle("Manage Canvas Templates");
setMessage("Drag and drop Templates into Categories.");
return control;
}
@Override
protected String getHelpID() {
return HELP_ID;
}
}
|
// ... existing code ...
*/
public class CanvasTemplateManagerDialog extends TemplateManagerDialog {
private static String HELP_ID = "uk.ac.bolton.archimate.help.CanvasTemplateManagerDialog"; //$NON-NLS-1$
public CanvasTemplateManagerDialog(Shell parentShell, TemplateManager templateManager) {
super(parentShell, templateManager);
}
// ... modified code ...
return control;
}
@Override
protected String getHelpID() {
return HELP_ID;
}
}
// ... rest of the code ...
|
bd4b4af4596120d1deee6db4e420829c0d40e377
|
boris/settings/config.py
|
boris/settings/config.py
|
import os
import boris
from .base import STATIC_URL
# helper for building absolute paths
PROJECT_ROOT = os.path.abspath(os.path.dirname(boris.__file__))
p = lambda x: os.path.join(PROJECT_ROOT, x)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'boris.db',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'charset': 'utf8',
'init_command': 'SET '
'storage_engine=MyISAM,'
'character_set_connection=utf8,'
'collation_connection=utf8_general_ci'
},
},
}
STATIC_ROOT = p('static')
ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/"
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# List of callables that know how to import templates from various sources.
SECRET_KEY = 'od94mflb73jdjhw63hr7v9jfu7f6fhdujckwlqld87ff'
|
import os
import boris
from .base import STATIC_URL
# helper for building absolute paths
PROJECT_ROOT = os.path.abspath(os.path.dirname(boris.__file__))
p = lambda x: os.path.join(PROJECT_ROOT, x)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'boris',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'charset': 'utf8',
'init_command': 'SET '
'storage_engine=MyISAM,'
'character_set_connection=utf8,'
'collation_connection=utf8_general_ci'
},
},
}
STATIC_ROOT = p('static')
ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/"
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# List of callables that know how to import templates from various sources.
SECRET_KEY = 'od94mflb73jdjhw63hr7v9jfu7f6fhdujckwlqld87ff'
|
Change default database from sqlite to mysql.
|
Change default database from sqlite to mysql.
|
Python
|
mit
|
fragaria/BorIS,fragaria/BorIS,fragaria/BorIS
|
python
|
## Code Before:
import os
import boris
from .base import STATIC_URL
# helper for building absolute paths
PROJECT_ROOT = os.path.abspath(os.path.dirname(boris.__file__))
p = lambda x: os.path.join(PROJECT_ROOT, x)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'boris.db',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'charset': 'utf8',
'init_command': 'SET '
'storage_engine=MyISAM,'
'character_set_connection=utf8,'
'collation_connection=utf8_general_ci'
},
},
}
STATIC_ROOT = p('static')
ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/"
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# List of callables that know how to import templates from various sources.
SECRET_KEY = 'od94mflb73jdjhw63hr7v9jfu7f6fhdujckwlqld87ff'
## Instruction:
Change default database from sqlite to mysql.
## Code After:
import os
import boris
from .base import STATIC_URL
# helper for building absolute paths
PROJECT_ROOT = os.path.abspath(os.path.dirname(boris.__file__))
p = lambda x: os.path.join(PROJECT_ROOT, x)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'boris',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'charset': 'utf8',
'init_command': 'SET '
'storage_engine=MyISAM,'
'character_set_connection=utf8,'
'collation_connection=utf8_general_ci'
},
},
}
STATIC_ROOT = p('static')
ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/"
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# List of callables that know how to import templates from various sources.
SECRET_KEY = 'od94mflb73jdjhw63hr7v9jfu7f6fhdujckwlqld87ff'
|
...
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'boris',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
...
|
a03eb91088943a4b3ed0ae5fc87b104562a4a645
|
location_field/urls.py
|
location_field/urls.py
|
try:
from django.conf.urls import patterns # Django>=1.6
except ImportError:
from django.conf.urls.defaults import patterns # Django<1.6
import os
app_dir = os.path.dirname(__file__)
urlpatterns = patterns(
'',
(r'^media/(.*)$', 'django.views.static.serve', {
'document_root': '%s/media' % app_dir}),
)
|
from django.conf.urls import patterns
import os
app_dir = os.path.dirname(__file__)
urlpatterns = patterns(
'',
(r'^media/(.*)$', 'django.views.static.serve', {
'document_root': '%s/media' % app_dir}),
)
|
Drop support for Django 1.6
|
Drop support for Django 1.6
|
Python
|
mit
|
Mixser/django-location-field,recklessromeo/django-location-field,Mixser/django-location-field,voodmania/django-location-field,recklessromeo/django-location-field,undernewmanagement/django-location-field,voodmania/django-location-field,caioariede/django-location-field,caioariede/django-location-field,undernewmanagement/django-location-field,Mixser/django-location-field,undernewmanagement/django-location-field,caioariede/django-location-field,recklessromeo/django-location-field,voodmania/django-location-field
|
python
|
## Code Before:
try:
from django.conf.urls import patterns # Django>=1.6
except ImportError:
from django.conf.urls.defaults import patterns # Django<1.6
import os
app_dir = os.path.dirname(__file__)
urlpatterns = patterns(
'',
(r'^media/(.*)$', 'django.views.static.serve', {
'document_root': '%s/media' % app_dir}),
)
## Instruction:
Drop support for Django 1.6
## Code After:
from django.conf.urls import patterns
import os
app_dir = os.path.dirname(__file__)
urlpatterns = patterns(
'',
(r'^media/(.*)$', 'django.views.static.serve', {
'document_root': '%s/media' % app_dir}),
)
|
...
from django.conf.urls import patterns
import os
...
|
2844753a11f76da325f00b9f0c92ed281b235572
|
httpebble/src/com/lukekorth/httpebble/Httpebble.java
|
httpebble/src/com/lukekorth/httpebble/Httpebble.java
|
package com.lukekorth.httpebble;
import android.app.Activity;
import android.os.Bundle;
public class Httpebble extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
}
|
package com.lukekorth.httpebble;
import android.app.Dialog;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentActivity;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
public class Httpebble extends FragmentActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
@Override
protected void onResume() {
// Check that Google Play services is available
int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
// If Google Play services is unavailable
if (ConnectionResult.SUCCESS != resultCode) {
// Get the error dialog from Google Play services
Dialog errorDialog = GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0);
// If Google Play services can provide an error dialog
if (errorDialog != null) {
// Create a new DialogFragment for the error dialog
ErrorDialogFragment errorFragment = new ErrorDialogFragment();
// Set the dialog in the DialogFragment
errorFragment.setDialog(errorDialog);
// Show the error dialog in the DialogFragment
errorFragment.show(getSupportFragmentManager(), "Location Updates");
}
}
}
// Define a DialogFragment that displays the error dialog
public static class ErrorDialogFragment extends DialogFragment {
// Global field to contain the error dialog
private Dialog mDialog;
// Default constructor. Sets the dialog field to null
public ErrorDialogFragment() {
super();
mDialog = null;
}
// Set the dialog to display
public void setDialog(Dialog dialog) {
mDialog = dialog;
}
// Return a Dialog to the DialogFragment.
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
return mDialog;
}
}
}
|
Add check for Google Play services
|
Add check for Google Play services
|
Java
|
mit
|
lkorth/httpebble-android
|
java
|
## Code Before:
package com.lukekorth.httpebble;
import android.app.Activity;
import android.os.Bundle;
public class Httpebble extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
}
## Instruction:
Add check for Google Play services
## Code After:
package com.lukekorth.httpebble;
import android.app.Dialog;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentActivity;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
public class Httpebble extends FragmentActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
@Override
protected void onResume() {
// Check that Google Play services is available
int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
// If Google Play services is unavailable
if (ConnectionResult.SUCCESS != resultCode) {
// Get the error dialog from Google Play services
Dialog errorDialog = GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0);
// If Google Play services can provide an error dialog
if (errorDialog != null) {
// Create a new DialogFragment for the error dialog
ErrorDialogFragment errorFragment = new ErrorDialogFragment();
// Set the dialog in the DialogFragment
errorFragment.setDialog(errorDialog);
// Show the error dialog in the DialogFragment
errorFragment.show(getSupportFragmentManager(), "Location Updates");
}
}
}
// Define a DialogFragment that displays the error dialog
public static class ErrorDialogFragment extends DialogFragment {
// Global field to contain the error dialog
private Dialog mDialog;
// Default constructor. Sets the dialog field to null
public ErrorDialogFragment() {
super();
mDialog = null;
}
// Set the dialog to display
public void setDialog(Dialog dialog) {
mDialog = dialog;
}
// Return a Dialog to the DialogFragment.
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
return mDialog;
}
}
}
|
# ... existing code ...
package com.lukekorth.httpebble;
import android.app.Dialog;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentActivity;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesUtil;
public class Httpebble extends FragmentActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
# ... modified code ...
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
}
@Override
protected void onResume() {
// Check that Google Play services is available
int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
// If Google Play services is unavailable
if (ConnectionResult.SUCCESS != resultCode) {
// Get the error dialog from Google Play services
Dialog errorDialog = GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0);
// If Google Play services can provide an error dialog
if (errorDialog != null) {
// Create a new DialogFragment for the error dialog
ErrorDialogFragment errorFragment = new ErrorDialogFragment();
// Set the dialog in the DialogFragment
errorFragment.setDialog(errorDialog);
// Show the error dialog in the DialogFragment
errorFragment.show(getSupportFragmentManager(), "Location Updates");
}
}
}
// Define a DialogFragment that displays the error dialog
public static class ErrorDialogFragment extends DialogFragment {
// Global field to contain the error dialog
private Dialog mDialog;
// Default constructor. Sets the dialog field to null
public ErrorDialogFragment() {
super();
mDialog = null;
}
// Set the dialog to display
public void setDialog(Dialog dialog) {
mDialog = dialog;
}
// Return a Dialog to the DialogFragment.
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
return mDialog;
}
}
}
# ... rest of the code ...
|
c28ad1070b20841d4680e05030e2221c282110c0
|
utils/src/main/java/com/janosgyerik/utils/algorithm/sort/MergeSort.java
|
utils/src/main/java/com/janosgyerik/utils/algorithm/sort/MergeSort.java
|
package com.janosgyerik.utils.algorithm.sort;
public class MergeSort {
private MergeSort() {
// utility class, forbidden constructor
}
public static void sort(int[] arr) {
mergeSort(arr, 0, arr.length);
}
protected static void mergeSort(int[] arr, int from, int to) {
int diff = to - from;
if (diff < 2) {
return;
}
int mid = from + diff / 2;
mergeSort(arr, from, mid);
mergeSort(arr, mid, to);
merge(arr, from, mid, to);
}
protected static void merge(int[] arr, int from, int mid, int to) {
int[] sorted = new int[to - from];
for (int i = 0, pos1 = from, pos2 = mid; i < sorted.length; ++i) {
if (pos1 < mid && (pos2 >= to || arr[pos1] <= arr[pos2])) {
sorted[i] = arr[pos1++];
} else {
sorted[i] = arr[pos2++];
}
}
System.arraycopy(sorted, 0, arr, from, sorted.length);
}
}
|
package com.janosgyerik.utils.algorithm.sort;
public class MergeSort {
private MergeSort() {
// utility class, forbidden constructor
}
public static void sort(int[] arr) {
mergeSort(arr, 0, arr.length);
}
protected static void mergeSort(int[] arr, int from, int to) {
int diff = to - from;
if (diff < 2) {
return;
}
int mid = from + diff / 2;
mergeSort(arr, from, mid);
mergeSort(arr, mid, to);
merge(arr, from, mid, to);
}
static void merge(int[] arr, int from, int mid, int to) {
int[] sorted = new int[to - from];
int i = 0;
int pos1 = from;
int pos2 = mid;
while (pos1 < mid && pos2 < to) {
if (arr[pos1] <= arr[pos2]) {
sorted[i++] = arr[pos1++];
} else {
sorted[i++] = arr[pos2++];
}
}
while (pos1 < mid) {
sorted[i++] = arr[pos1++];
}
while (pos2 < to) {
sorted[i++] = arr[pos2++];
}
System.arraycopy(sorted, 0, arr, from, sorted.length);
}
}
|
Refactor mergeSort to more readable form
|
Refactor mergeSort to more readable form
|
Java
|
mit
|
janosgyerik/java-tools
|
java
|
## Code Before:
package com.janosgyerik.utils.algorithm.sort;
public class MergeSort {
private MergeSort() {
// utility class, forbidden constructor
}
public static void sort(int[] arr) {
mergeSort(arr, 0, arr.length);
}
protected static void mergeSort(int[] arr, int from, int to) {
int diff = to - from;
if (diff < 2) {
return;
}
int mid = from + diff / 2;
mergeSort(arr, from, mid);
mergeSort(arr, mid, to);
merge(arr, from, mid, to);
}
protected static void merge(int[] arr, int from, int mid, int to) {
int[] sorted = new int[to - from];
for (int i = 0, pos1 = from, pos2 = mid; i < sorted.length; ++i) {
if (pos1 < mid && (pos2 >= to || arr[pos1] <= arr[pos2])) {
sorted[i] = arr[pos1++];
} else {
sorted[i] = arr[pos2++];
}
}
System.arraycopy(sorted, 0, arr, from, sorted.length);
}
}
## Instruction:
Refactor mergeSort to more readable form
## Code After:
package com.janosgyerik.utils.algorithm.sort;
public class MergeSort {
private MergeSort() {
// utility class, forbidden constructor
}
public static void sort(int[] arr) {
mergeSort(arr, 0, arr.length);
}
protected static void mergeSort(int[] arr, int from, int to) {
int diff = to - from;
if (diff < 2) {
return;
}
int mid = from + diff / 2;
mergeSort(arr, from, mid);
mergeSort(arr, mid, to);
merge(arr, from, mid, to);
}
static void merge(int[] arr, int from, int mid, int to) {
int[] sorted = new int[to - from];
int i = 0;
int pos1 = from;
int pos2 = mid;
while (pos1 < mid && pos2 < to) {
if (arr[pos1] <= arr[pos2]) {
sorted[i++] = arr[pos1++];
} else {
sorted[i++] = arr[pos2++];
}
}
while (pos1 < mid) {
sorted[i++] = arr[pos1++];
}
while (pos2 < to) {
sorted[i++] = arr[pos2++];
}
System.arraycopy(sorted, 0, arr, from, sorted.length);
}
}
|
# ... existing code ...
merge(arr, from, mid, to);
}
static void merge(int[] arr, int from, int mid, int to) {
int[] sorted = new int[to - from];
int i = 0;
int pos1 = from;
int pos2 = mid;
while (pos1 < mid && pos2 < to) {
if (arr[pos1] <= arr[pos2]) {
sorted[i++] = arr[pos1++];
} else {
sorted[i++] = arr[pos2++];
}
}
while (pos1 < mid) {
sorted[i++] = arr[pos1++];
}
while (pos2 < to) {
sorted[i++] = arr[pos2++];
}
System.arraycopy(sorted, 0, arr, from, sorted.length);
}
# ... rest of the code ...
|
8c3e2393226e4968ea0f7539505c61a90a84ce4d
|
src/main/java/org/concord/datagraph/analysis/GraphAnalyzer.java
|
src/main/java/org/concord/datagraph/analysis/GraphAnalyzer.java
|
package org.concord.datagraph.analysis;
import java.awt.Component;
import java.util.ArrayList;
import org.concord.data.state.OTDataStore;
import org.concord.datagraph.analysis.rubric.GraphRubric;
import org.concord.datagraph.analysis.rubric.ResultSet;
import org.concord.datagraph.state.OTDataCollector;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.graph.util.state.OTHideableAnnotation;
public interface GraphAnalyzer {
public Graph getSegments(OTDataStore dataStore, int xChannel, int yChannel, double tolerance) throws IndexOutOfBoundsException;
public GraphRubric buildRubric(OTObjectList rubric);
public ResultSet compareGraphs(GraphRubric expected, Graph received);
public String getHtmlReasons(ResultSet results);
public void displayHtmlReasonsPopup(Component parent, ResultSet results);
public ArrayList<OTHideableAnnotation> annotateResults(OTDataCollector studentObject, ResultSet scoreResults);
}
|
package org.concord.datagraph.analysis;
import java.awt.Component;
import java.util.ArrayList;
import org.concord.data.state.OTDataStore;
import org.concord.datagraph.analysis.rubric.GraphRubric;
import org.concord.datagraph.analysis.rubric.ResultSet;
import org.concord.datagraph.state.OTDataCollector;
import org.concord.datagraph.state.OTDataGraphable;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.graph.util.state.OTHideableAnnotation;
public interface GraphAnalyzer {
public Graph getSegments(OTDataStore dataStore, int xChannel, int yChannel, double tolerance) throws IndexOutOfBoundsException;
public GraphRubric buildRubric(OTObjectList rubric);
public ResultSet compareGraphs(GraphRubric expected, Graph received);
public String getHtmlReasons(ResultSet results);
public void displayHtmlReasonsPopup(Component parent, ResultSet results);
public ArrayList<OTHideableAnnotation> annotateResults(OTDataCollector dataCollector, ResultSet scoreResults);
public OTDataGraphable drawSegmentResults(OTDataCollector dataCollector, Graph graph);
}
|
Add support for drawing the interpreted segments into a data collector.
|
Add support for drawing the interpreted segments into a data collector.
git-svn-id: a0d2519504059b70a86a1ce51b726c2279190bad@24833 6e01202a-0783-4428-890a-84243c50cc2b
|
Java
|
lgpl-2.1
|
concord-consortium/datagraph
|
java
|
## Code Before:
package org.concord.datagraph.analysis;
import java.awt.Component;
import java.util.ArrayList;
import org.concord.data.state.OTDataStore;
import org.concord.datagraph.analysis.rubric.GraphRubric;
import org.concord.datagraph.analysis.rubric.ResultSet;
import org.concord.datagraph.state.OTDataCollector;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.graph.util.state.OTHideableAnnotation;
public interface GraphAnalyzer {
public Graph getSegments(OTDataStore dataStore, int xChannel, int yChannel, double tolerance) throws IndexOutOfBoundsException;
public GraphRubric buildRubric(OTObjectList rubric);
public ResultSet compareGraphs(GraphRubric expected, Graph received);
public String getHtmlReasons(ResultSet results);
public void displayHtmlReasonsPopup(Component parent, ResultSet results);
public ArrayList<OTHideableAnnotation> annotateResults(OTDataCollector studentObject, ResultSet scoreResults);
}
## Instruction:
Add support for drawing the interpreted segments into a data collector.
git-svn-id: a0d2519504059b70a86a1ce51b726c2279190bad@24833 6e01202a-0783-4428-890a-84243c50cc2b
## Code After:
package org.concord.datagraph.analysis;
import java.awt.Component;
import java.util.ArrayList;
import org.concord.data.state.OTDataStore;
import org.concord.datagraph.analysis.rubric.GraphRubric;
import org.concord.datagraph.analysis.rubric.ResultSet;
import org.concord.datagraph.state.OTDataCollector;
import org.concord.datagraph.state.OTDataGraphable;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.graph.util.state.OTHideableAnnotation;
public interface GraphAnalyzer {
public Graph getSegments(OTDataStore dataStore, int xChannel, int yChannel, double tolerance) throws IndexOutOfBoundsException;
public GraphRubric buildRubric(OTObjectList rubric);
public ResultSet compareGraphs(GraphRubric expected, Graph received);
public String getHtmlReasons(ResultSet results);
public void displayHtmlReasonsPopup(Component parent, ResultSet results);
public ArrayList<OTHideableAnnotation> annotateResults(OTDataCollector dataCollector, ResultSet scoreResults);
public OTDataGraphable drawSegmentResults(OTDataCollector dataCollector, Graph graph);
}
|
# ... existing code ...
import org.concord.datagraph.analysis.rubric.GraphRubric;
import org.concord.datagraph.analysis.rubric.ResultSet;
import org.concord.datagraph.state.OTDataCollector;
import org.concord.datagraph.state.OTDataGraphable;
import org.concord.framework.otrunk.OTObjectList;
import org.concord.graph.util.state.OTHideableAnnotation;
# ... modified code ...
public ResultSet compareGraphs(GraphRubric expected, Graph received);
public String getHtmlReasons(ResultSet results);
public void displayHtmlReasonsPopup(Component parent, ResultSet results);
public ArrayList<OTHideableAnnotation> annotateResults(OTDataCollector dataCollector, ResultSet scoreResults);
public OTDataGraphable drawSegmentResults(OTDataCollector dataCollector, Graph graph);
}
# ... rest of the code ...
|
be082dc2aee440d7be1eda87422d25f607feb443
|
src/main/java/com/sunny/grokkingalgorithms/fasttrack/Poweroftwo.java
|
src/main/java/com/sunny/grokkingalgorithms/fasttrack/Poweroftwo.java
|
package com.sunny.grokkingalgorithms.fasttrack;
/**
* Created by sundas on 6/17/2018.
*/
public class Poweroftwo {
public static void main(String[] args) {
int a = 3;
while(a != 1){
if(a %2 == 0){
System.out.println("true");
break;
}
a = a/2;
}
}
}
|
package com.sunny.grokkingalgorithms.fasttrack;
/**
* Created by sundas on 6/17/2018.
*/
public class Poweroftwo {
public static void main(String[] args) {
int a = 3;
while(a != 1){
if(a %2 == 0){
System.out.println("true");
break;
}
a = a/2;
}
a = 4;
int x = a&(a - 1);
System.out.println(x == 0);
}
}
|
Check if a number is power of 2
|
Check if a number is power of 2
|
Java
|
mit
|
sunnydas/grokkingalgos,sunnydas/grokkingalgos
|
java
|
## Code Before:
package com.sunny.grokkingalgorithms.fasttrack;
/**
* Created by sundas on 6/17/2018.
*/
public class Poweroftwo {
public static void main(String[] args) {
int a = 3;
while(a != 1){
if(a %2 == 0){
System.out.println("true");
break;
}
a = a/2;
}
}
}
## Instruction:
Check if a number is power of 2
## Code After:
package com.sunny.grokkingalgorithms.fasttrack;
/**
* Created by sundas on 6/17/2018.
*/
public class Poweroftwo {
public static void main(String[] args) {
int a = 3;
while(a != 1){
if(a %2 == 0){
System.out.println("true");
break;
}
a = a/2;
}
a = 4;
int x = a&(a - 1);
System.out.println(x == 0);
}
}
|
# ... existing code ...
a = a/2;
}
a = 4;
int x = a&(a - 1);
System.out.println(x == 0);
}
}
# ... rest of the code ...
|
9340480f46eb97d0774b77f47c316fe0bb4632e5
|
dhash/dhblock_chash_srv.h
|
dhash/dhblock_chash_srv.h
|
class pmaint;
struct block_info;
struct adb_keyaux_t;
enum adb_status;
// Internal implementation of content hash repair_job logic.
class rjchash;
class dhblock_chash_srv : public dhblock_srv {
friend class rjchash;
ptr<adb> cache_db;
pmaint *pmaint_obj;
void localqueue (u_int32_t frags, clnt_stat err, adb_status stat, vec<block_info> keys);
public:
dhblock_chash_srv (ptr<vnode> node, ptr<dhashcli> cli, str dbname, str dbext,
str desc, cbv donecb);
~dhblock_chash_srv ();
void start (bool randomize);
void stop ();
void store (chordID k, str d, cb_dhstat cb);
void offer (user_args *sbp, dhash_offer_arg *arg);
void stats (vec<dstat> &s);
void generate_repair_jobs ();
};
|
class pmaint;
struct block_info;
struct adb_keyaux_t;
enum adb_status;
// Internal implementation of content hash repair_job logic.
class rjchash;
class dhblock_chash_srv : public dhblock_srv {
friend class rjchash;
ptr<adb> cache_db;
pmaint *pmaint_obj;
void localqueue (u_int32_t frags, clnt_stat err, adb_status stat, vec<block_info> keys);
public:
dhblock_chash_srv (ptr<vnode> node, ptr<dhashcli> cli,
str desc, str dbname, str dbext, cbv donecb);
~dhblock_chash_srv ();
void start (bool randomize);
void stop ();
void store (chordID k, str d, cb_dhstat cb);
void offer (user_args *sbp, dhash_offer_arg *arg);
void stats (vec<dstat> &s);
void generate_repair_jobs ();
};
|
Fix mis-named parameters in header.
|
Fix mis-named parameters in header.
|
C
|
mit
|
weidezhang/dht,sit/dht,sit/dht,sit/dht,weidezhang/dht,weidezhang/dht,sit/dht,sit/dht,weidezhang/dht,weidezhang/dht
|
c
|
## Code Before:
class pmaint;
struct block_info;
struct adb_keyaux_t;
enum adb_status;
// Internal implementation of content hash repair_job logic.
class rjchash;
class dhblock_chash_srv : public dhblock_srv {
friend class rjchash;
ptr<adb> cache_db;
pmaint *pmaint_obj;
void localqueue (u_int32_t frags, clnt_stat err, adb_status stat, vec<block_info> keys);
public:
dhblock_chash_srv (ptr<vnode> node, ptr<dhashcli> cli, str dbname, str dbext,
str desc, cbv donecb);
~dhblock_chash_srv ();
void start (bool randomize);
void stop ();
void store (chordID k, str d, cb_dhstat cb);
void offer (user_args *sbp, dhash_offer_arg *arg);
void stats (vec<dstat> &s);
void generate_repair_jobs ();
};
## Instruction:
Fix mis-named parameters in header.
## Code After:
class pmaint;
struct block_info;
struct adb_keyaux_t;
enum adb_status;
// Internal implementation of content hash repair_job logic.
class rjchash;
class dhblock_chash_srv : public dhblock_srv {
friend class rjchash;
ptr<adb> cache_db;
pmaint *pmaint_obj;
void localqueue (u_int32_t frags, clnt_stat err, adb_status stat, vec<block_info> keys);
public:
dhblock_chash_srv (ptr<vnode> node, ptr<dhashcli> cli,
str desc, str dbname, str dbext, cbv donecb);
~dhblock_chash_srv ();
void start (bool randomize);
void stop ();
void store (chordID k, str d, cb_dhstat cb);
void offer (user_args *sbp, dhash_offer_arg *arg);
void stats (vec<dstat> &s);
void generate_repair_jobs ();
};
|
...
void localqueue (u_int32_t frags, clnt_stat err, adb_status stat, vec<block_info> keys);
public:
dhblock_chash_srv (ptr<vnode> node, ptr<dhashcli> cli,
str desc, str dbname, str dbext, cbv donecb);
~dhblock_chash_srv ();
void start (bool randomize);
...
|
ac4426825b2c9e388163a26d651861b11e4c6cce
|
src/main/kotlin/me/mrkirby153/KirBot/music/AudioPlayerSendHandler.kt
|
src/main/kotlin/me/mrkirby153/KirBot/music/AudioPlayerSendHandler.kt
|
package me.mrkirby153.KirBot.music
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer
import com.sedmelluq.discord.lavaplayer.track.playback.AudioFrame
import net.dv8tion.jda.core.audio.AudioSendHandler
class AudioPlayerSendHandler(val audioPlayer: AudioPlayer): AudioSendHandler {
private var lastFrame: AudioFrame? = null
override fun provide20MsAudio(): ByteArray = lastFrame!!.data;
override fun canProvide(): Boolean {
lastFrame = audioPlayer.provide()
return lastFrame != null
}
override fun isOpus() = true
}
|
package me.mrkirby153.KirBot.music
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer
import com.sedmelluq.discord.lavaplayer.track.playback.AudioFrame
import net.dv8tion.jda.core.audio.AudioSendHandler
class AudioPlayerSendHandler(val audioPlayer: AudioPlayer) : AudioSendHandler {
private var lastFrame: AudioFrame? = null
override fun provide20MsAudio(): ByteArray? {
if (lastFrame == null)
lastFrame = audioPlayer.provide()
val data = lastFrame?.data
lastFrame = null
return data
}
override fun canProvide(): Boolean {
lastFrame = audioPlayer.provide()
return lastFrame != null
}
override fun isOpus() = true
}
|
Fix race condition that would cause streams to fail sometimes
|
Fix race condition that would cause streams to fail sometimes
|
Kotlin
|
mit
|
mrkirby153/KirBot
|
kotlin
|
## Code Before:
package me.mrkirby153.KirBot.music
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer
import com.sedmelluq.discord.lavaplayer.track.playback.AudioFrame
import net.dv8tion.jda.core.audio.AudioSendHandler
class AudioPlayerSendHandler(val audioPlayer: AudioPlayer): AudioSendHandler {
private var lastFrame: AudioFrame? = null
override fun provide20MsAudio(): ByteArray = lastFrame!!.data;
override fun canProvide(): Boolean {
lastFrame = audioPlayer.provide()
return lastFrame != null
}
override fun isOpus() = true
}
## Instruction:
Fix race condition that would cause streams to fail sometimes
## Code After:
package me.mrkirby153.KirBot.music
import com.sedmelluq.discord.lavaplayer.player.AudioPlayer
import com.sedmelluq.discord.lavaplayer.track.playback.AudioFrame
import net.dv8tion.jda.core.audio.AudioSendHandler
class AudioPlayerSendHandler(val audioPlayer: AudioPlayer) : AudioSendHandler {
private var lastFrame: AudioFrame? = null
override fun provide20MsAudio(): ByteArray? {
if (lastFrame == null)
lastFrame = audioPlayer.provide()
val data = lastFrame?.data
lastFrame = null
return data
}
override fun canProvide(): Boolean {
lastFrame = audioPlayer.provide()
return lastFrame != null
}
override fun isOpus() = true
}
|
# ... existing code ...
import com.sedmelluq.discord.lavaplayer.track.playback.AudioFrame
import net.dv8tion.jda.core.audio.AudioSendHandler
class AudioPlayerSendHandler(val audioPlayer: AudioPlayer) : AudioSendHandler {
private var lastFrame: AudioFrame? = null
override fun provide20MsAudio(): ByteArray? {
if (lastFrame == null)
lastFrame = audioPlayer.provide()
val data = lastFrame?.data
lastFrame = null
return data
}
override fun canProvide(): Boolean {
lastFrame = audioPlayer.provide()
# ... rest of the code ...
|
4f8aed6ed3491e62911619eaa9aa4b86b30065e4
|
leonardo/module/leonardo_auth/widget/userlogin/models.py
|
leonardo/module/leonardo_auth/widget/userlogin/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
else:
context['next'] = request.path
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
|
Fix missing next in context.
|
Fix missing next in context.
|
Python
|
bsd-3-clause
|
django-leonardo/django-leonardo,django-leonardo/django-leonardo,django-leonardo/django-leonardo,django-leonardo/django-leonardo
|
python
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
## Instruction:
Fix missing next in context.
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
else:
context['next'] = request.path
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
|
# ... existing code ...
if 'next' in request.GET:
context['next'] = request.GET['next']
else:
context['next'] = request.path
return context
# ... rest of the code ...
|
97b7ba9d4d6bf948435ce58dd21b60d78d75fd29
|
lib-dynload/lzo/__init__.py
|
lib-dynload/lzo/__init__.py
|
import sys
import os
p1, p2 = sys.version_info[:2]
curpath = os.path.abspath( sys.argv[0] )
if os.path.islink(curpath):
curpath = os.readlink(curpath)
currentdir = os.path.dirname( curpath )
build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lzo", "build") )
dirs = os.listdir(build_dir)
for d in dirs:
if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1:
sys.path.insert(0, os.path.join(build_dir, d) )
import importlib
module = importlib.import_module("_lzo")
compress = module.compress
decompress = module.decompress
sys.path.pop(0)
break
|
import sys
import os
p1, p2 = sys.version_info[:2]
curpath = os.path.abspath( sys.argv[0] )
if os.path.islink(curpath):
curpath = os.readlink(curpath)
currentdir = os.path.dirname( curpath )
build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lzo", "build") )
dirs = os.listdir(build_dir)
for d in dirs:
if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1:
sys.path.insert(0, os.path.join(build_dir, d) )
import importlib
module = importlib.import_module("_lzo")
module.set_block_size(16*1024*1024)
compress = module.compress
decompress = module.decompress
sys.path.pop(0)
break
|
Adjust maximum block size for lzo
|
Adjust maximum block size for lzo
|
Python
|
mit
|
sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs
|
python
|
## Code Before:
import sys
import os
p1, p2 = sys.version_info[:2]
curpath = os.path.abspath( sys.argv[0] )
if os.path.islink(curpath):
curpath = os.readlink(curpath)
currentdir = os.path.dirname( curpath )
build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lzo", "build") )
dirs = os.listdir(build_dir)
for d in dirs:
if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1:
sys.path.insert(0, os.path.join(build_dir, d) )
import importlib
module = importlib.import_module("_lzo")
compress = module.compress
decompress = module.decompress
sys.path.pop(0)
break
## Instruction:
Adjust maximum block size for lzo
## Code After:
import sys
import os
p1, p2 = sys.version_info[:2]
curpath = os.path.abspath( sys.argv[0] )
if os.path.islink(curpath):
curpath = os.readlink(curpath)
currentdir = os.path.dirname( curpath )
build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lzo", "build") )
dirs = os.listdir(build_dir)
for d in dirs:
if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1:
sys.path.insert(0, os.path.join(build_dir, d) )
import importlib
module = importlib.import_module("_lzo")
module.set_block_size(16*1024*1024)
compress = module.compress
decompress = module.decompress
sys.path.pop(0)
break
|
...
import importlib
module = importlib.import_module("_lzo")
module.set_block_size(16*1024*1024)
compress = module.compress
decompress = module.decompress
...
|
e2ffc3d09157a0bfba4a0bcaca98691d99d04d77
|
Scripted/CIP_/CIP/ui/__init__.py
|
Scripted/CIP_/CIP/ui/__init__.py
|
from .CIP_EditorWidget import CIP_EditorWidget
from .CIP_EditBox import *
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
from .CollapsibleMultilineText import *
from .PdfReporter import *
#from ACIL_GetImage.CaseNavigatorWidget import *
#from AutoUpdateWidget import AutoUpdateWidget
# import os
# CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons')
# del os
|
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
from .CollapsibleMultilineText import *
from .PdfReporter import *
#from ACIL_GetImage.CaseNavigatorWidget import *
#from AutoUpdateWidget import AutoUpdateWidget
# import os
# CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons')
# del os
|
Make CIP compatible with Slicer 5
|
ENH: Make CIP compatible with Slicer 5
- remove CIP UI Editor includes to enable CIP loading in Slicer preview
|
Python
|
bsd-3-clause
|
acil-bwh/SlicerCIP,acil-bwh/SlicerCIP,acil-bwh/SlicerCIP,acil-bwh/SlicerCIP
|
python
|
## Code Before:
from .CIP_EditorWidget import CIP_EditorWidget
from .CIP_EditBox import *
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
from .CollapsibleMultilineText import *
from .PdfReporter import *
#from ACIL_GetImage.CaseNavigatorWidget import *
#from AutoUpdateWidget import AutoUpdateWidget
# import os
# CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons')
# del os
## Instruction:
ENH: Make CIP compatible with Slicer 5
- remove CIP UI Editor includes to enable CIP loading in Slicer preview
## Code After:
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
from .CollapsibleMultilineText import *
from .PdfReporter import *
#from ACIL_GetImage.CaseNavigatorWidget import *
#from AutoUpdateWidget import AutoUpdateWidget
# import os
# CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons')
# del os
|
# ... existing code ...
from .CaseReportsWidget import *
from .PreProcessingWidget import *
from .MIPViewerWidget import *
# ... rest of the code ...
|
a80d47e9153939ed44799e7e0d9c470df3a45b3c
|
src/main/java/se/kits/gakusei/config/UserDetailsServiceImpl.java
|
src/main/java/se/kits/gakusei/config/UserDetailsServiceImpl.java
|
package se.kits.gakusei.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import se.kits.gakusei.user.model.User;
import se.kits.gakusei.user.repository.UserRepository;
@Service("userDetailsService")
public class UserDetailsServiceImpl implements UserDetailsService {
private final UserRepository repository;
@Autowired
public UserDetailsServiceImpl(UserRepository repository) {
this.repository = repository;
}
@Override
public UserDetails loadUserByUsername(String name) throws UsernameNotFoundException {
User user = this.repository.findByUsername(name);
return new org.springframework.security.core.userdetails.User(user.getUsername(), user.getPassword(),
AuthorityUtils.createAuthorityList(user.getRole()));
}
}
|
package se.kits.gakusei.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import se.kits.gakusei.user.model.User;
import se.kits.gakusei.user.repository.UserRepository;
@Service("userDetailsService")
public class UserDetailsServiceImpl implements UserDetailsService {
private final UserRepository repository;
@Autowired
public UserDetailsServiceImpl(UserRepository repository) {
this.repository = repository;
}
@Override
public UserDetails loadUserByUsername(String name) throws UsernameNotFoundException {
User user = this.repository.findByUsername(name);
if (user == null) throw new UsernameNotFoundException("User not found: " + name);
return new org.springframework.security.core.userdetails.User(user.getUsername(), user.getPassword(),
AuthorityUtils.createAuthorityList(user.getRole()));
}
}
|
Fix exception when trying to login with non-existing user
|
Fix exception when trying to login with non-existing user
|
Java
|
mit
|
kits-ab/gakusei,kits-ab/gakusei,kits-ab/gakusei
|
java
|
## Code Before:
package se.kits.gakusei.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import se.kits.gakusei.user.model.User;
import se.kits.gakusei.user.repository.UserRepository;
@Service("userDetailsService")
public class UserDetailsServiceImpl implements UserDetailsService {
private final UserRepository repository;
@Autowired
public UserDetailsServiceImpl(UserRepository repository) {
this.repository = repository;
}
@Override
public UserDetails loadUserByUsername(String name) throws UsernameNotFoundException {
User user = this.repository.findByUsername(name);
return new org.springframework.security.core.userdetails.User(user.getUsername(), user.getPassword(),
AuthorityUtils.createAuthorityList(user.getRole()));
}
}
## Instruction:
Fix exception when trying to login with non-existing user
## Code After:
package se.kits.gakusei.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Service;
import se.kits.gakusei.user.model.User;
import se.kits.gakusei.user.repository.UserRepository;
@Service("userDetailsService")
public class UserDetailsServiceImpl implements UserDetailsService {
private final UserRepository repository;
@Autowired
public UserDetailsServiceImpl(UserRepository repository) {
this.repository = repository;
}
@Override
public UserDetails loadUserByUsername(String name) throws UsernameNotFoundException {
User user = this.repository.findByUsername(name);
if (user == null) throw new UsernameNotFoundException("User not found: " + name);
return new org.springframework.security.core.userdetails.User(user.getUsername(), user.getPassword(),
AuthorityUtils.createAuthorityList(user.getRole()));
}
}
|
...
@Override
public UserDetails loadUserByUsername(String name) throws UsernameNotFoundException {
User user = this.repository.findByUsername(name);
if (user == null) throw new UsernameNotFoundException("User not found: " + name);
return new org.springframework.security.core.userdetails.User(user.getUsername(), user.getPassword(),
AuthorityUtils.createAuthorityList(user.getRole()));
}
...
|
2d39aed3dcdb28acc61a6598cca9836665c2674e
|
cs251tk/student/markdownify/check_submit_date.py
|
cs251tk/student/markdownify/check_submit_date.py
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
|
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
|
Modify error check in check_dates()
|
Modify error check in check_dates()
|
Python
|
mit
|
StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit
|
python
|
## Code Before:
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
_, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if res[0] is not 'f':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
## Instruction:
Modify error check in check_dates()
## Code After:
import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Sort dates earliest to latest
dates.sort()
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return dates[0].strftime("%x %X")
|
# ... existing code ...
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
stat, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if stat is 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# ... rest of the code ...
|
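As a side note, a minimal sketch of the earliest-date logic that check_dates() builds on, assuming ISO-8601 strings like those produced by git log --date=iso8601; the repository's run() helper is not reproduced here and the sample dates are made up:

from dateutil.parser import parse

# Hypothetical first-commit dates, one per file, as git log would print them.
raw_dates = ['2016-02-01 10:15:00 -0600', '2016-01-20 09:00:00 -0600']
dates = sorted(parse(d) for d in raw_dates)
# Earliest submission date, formatted the same way check_dates() returns it.
print(dates[0].strftime('%x %X'))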
ce37c4c11c5fb76788b2b6a04c51107941b6f518
|
src/main.c
|
src/main.c
|
//
// main.c
// converter - Command-line number converter to Mac OS
//
// Created by Paulo Ricardo Paz Vital on 23/05/15.
// Copyright (c) 2015 pvital Solutions. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
void usage(void) {
printf("usage: convert decimal_number \n");
}
int main(int argc, const char * argv[]) {
int decimal;
if (argc < 2 || argc > 3) {
usage();
return -1;
}
decimal = atoi(argv[1]);
if (decimal < 0) {
printf("ERROR: decimal number must be greater than zero (0).\n");
return -1;
}
if (decimal > INT_MAX) {
printf("ERROR: maximum decimal number supported is %d.\n", INT_MAX);
return -1;
}
return 0;
}
|
//
// main.c
// converter - Command-line number converter to Mac OS
//
// Created by Paulo Ricardo Paz Vital on 23/05/15.
// Copyright (c) 2015 pvital Solutions. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
void usage(void) {
printf("usage: convert decimal_number \n");
}
void dec2bin(int decimal) {
int remainder[32];
int quocient = decimal, i = 0;
while (quocient >= 2) {
remainder[i] = quocient % 2;
quocient = quocient / 2;
i++;
}
// add the last quocient in the end of remainder list
remainder[i] = quocient;
// print the remainder list in the revert order
printf ("The decimal number %d in binary is: ", decimal);
while (i >= 0) {
printf("%d", remainder[i]);
i--;
}
printf("\n");
}
int main(int argc, const char * argv[]) {
int decimal;
if (argc < 2 || argc > 3) {
usage();
return -1;
}
decimal = atoi(argv[1]);
if (decimal < 0) {
printf("ERROR: decimal number must be greater than zero (0).\n");
return -1;
}
if (decimal > INT_MAX) {
printf("ERROR: maximum decimal number supported is %d.\n", INT_MAX);
return -1;
}
dec2bin(decimal);
return 0;
}
|
Add decimal to binary conversion.
|
Add decimal to binary conversion.
Add function to convert decimal to binary.
Signed-off-by: Paulo Vital <[email protected]>
|
C
|
mit
|
pvital/converter
|
c
|
## Code Before:
//
// main.c
// converter - Command-line number converter to Mac OS
//
// Created by Paulo Ricardo Paz Vital on 23/05/15.
// Copyright (c) 2015 pvital Solutions. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
void usage(void) {
printf("usage: convert decimal_number \n");
}
int main(int argc, const char * argv[]) {
int decimal;
if (argc < 2 || argc > 3) {
usage();
return -1;
}
decimal = atoi(argv[1]);
if (decimal < 0) {
printf("ERROR: decimal number must be greater than zero (0).\n");
return -1;
}
if (decimal > INT_MAX) {
printf("ERROR: maximum decimal number supported is %d.\n", INT_MAX);
return -1;
}
return 0;
}
## Instruction:
Add decimal to binary conversion.
Add function to convert decimal to binary.
Signed-off-by: Paulo Vital <[email protected]>
## Code After:
//
// main.c
// converter - Command-line number converter to Mac OS
//
// Created by Paulo Ricardo Paz Vital on 23/05/15.
// Copyright (c) 2015 pvital Solutions. All rights reserved.
//
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
void usage(void) {
printf("usage: convert decimal_number \n");
}
void dec2bin(int decimal) {
int remainder[32];
int quocient = decimal, i = 0;
while (quocient >= 2) {
remainder[i] = quocient % 2;
quocient = quocient / 2;
i++;
}
// add the last quocient in the end of remainder list
remainder[i] = quocient;
// print the remainder list in the revert order
printf ("The decimal number %d in binary is: ", decimal);
while (i >= 0) {
printf("%d", remainder[i]);
i--;
}
printf("\n");
}
int main(int argc, const char * argv[]) {
int decimal;
if (argc < 2 || argc > 3) {
usage();
return -1;
}
decimal = atoi(argv[1]);
if (decimal < 0) {
printf("ERROR: decimal number must be greater than zero (0).\n");
return -1;
}
if (decimal > INT_MAX) {
printf("ERROR: maximum decimal number supported is %d.\n", INT_MAX);
return -1;
}
dec2bin(decimal);
return 0;
}
|
// ... existing code ...
void usage(void) {
printf("usage: convert decimal_number \n");
}
void dec2bin(int decimal) {
int remainder[32];
int quocient = decimal, i = 0;
while (quocient >= 2) {
remainder[i] = quocient % 2;
quocient = quocient / 2;
i++;
}
// add the last quocient in the end of remainder list
remainder[i] = quocient;
// print the remainder list in the revert order
printf ("The decimal number %d in binary is: ", decimal);
while (i >= 0) {
printf("%d", remainder[i]);
i--;
}
printf("\n");
}
int main(int argc, const char * argv[]) {
// ... modified code ...
printf("ERROR: maximum decimal number supported is %d.\n", INT_MAX);
return -1;
}
dec2bin(decimal);
return 0;
}
// ... rest of the code ...
|
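For illustration only, the same repeated-division idea behind dec2bin() above, sketched in Python with a worked example; this snippet is not part of the converter sources:

def dec2bin(decimal):
    # Collect remainders of division by 2, then read them back most-significant first.
    if decimal == 0:
        return '0'
    bits = []
    while decimal > 0:
        bits.append(str(decimal % 2))
        decimal //= 2
    return ''.join(reversed(bits))

print(dec2bin(13))  # 1101: 13 = 8 + 4 + 1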
cd7e4b63ebbb2c14d27d9bcdeb3d6388dff58522
|
serversaturday-common/src/main/java/com/campmongoose/serversaturday/common/submission/AbstractSubmitter.java
|
serversaturday-common/src/main/java/com/campmongoose/serversaturday/common/submission/AbstractSubmitter.java
|
package com.campmongoose.serversaturday.common.submission;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public abstract class AbstractSubmitter<B extends AbstractBuild, I, L> {
protected final Map<String, B> builds = new HashMap<>();
@Nonnull
protected final String name;
@Nonnull
protected final UUID uuid;
protected AbstractSubmitter(@Nonnull String name, @Nonnull UUID uuid) {
this.name = name;
this.uuid = uuid;
}
@Nullable
public B getBuild(@Nonnull String name) {
return builds.get(name);
}
@Nonnull
public List<B> getBuilds() {
return new ArrayList<>(builds.values());
}
@Nonnull
public abstract I getMenuRepresentation();
@Nonnull
public String getName() {
return name;
}
@Nonnull
public UUID getUUID() {
return uuid;
}
@Nonnull
public abstract B newBuild(@Nonnull String name, @Nonnull L location);
public boolean removeBuild(@Nonnull String name) {
return builds.remove(name) != null;
}
public abstract void save(@Nonnull File file);
}
|
package com.campmongoose.serversaturday.common.submission;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public abstract class AbstractSubmitter<B extends AbstractBuild, I, L> {
protected final Map<String, B> builds = new HashMap<>();
@Nonnull
protected final String name;
@Nonnull
protected final UUID uuid;
protected AbstractSubmitter(@Nonnull String name, @Nonnull UUID uuid) {
this.name = name;
this.uuid = uuid;
}
@Nullable
public B getBuild(@Nonnull String name) {
return builds.get(name);
}
@Nonnull
public List<B> getBuilds() {
return new ArrayList<>(builds.values());
}
@Nonnull
public abstract I getMenuRepresentation();
@Nonnull
public String getName() {
return name;
}
@Nonnull
public UUID getUUID() {
return uuid;
}
@Nonnull
public abstract B newBuild(@Nonnull String name, @Nonnull L location);
public boolean removeBuild(@Nonnull String name) {
return builds.remove(name) != null;
}
public void renameBuild(String newName, B build) {
builds.remove(build.getName());
build.setName(newName);
builds.put(newName, build);
}
public abstract void save(@Nonnull File file);
}
|
Fix an issue with updating builds.
|
Fix an issue with updating builds.
|
Java
|
mit
|
Musician101/ServerSaturday
|
java
|
## Code Before:
package com.campmongoose.serversaturday.common.submission;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public abstract class AbstractSubmitter<B extends AbstractBuild, I, L> {
protected final Map<String, B> builds = new HashMap<>();
@Nonnull
protected final String name;
@Nonnull
protected final UUID uuid;
protected AbstractSubmitter(@Nonnull String name, @Nonnull UUID uuid) {
this.name = name;
this.uuid = uuid;
}
@Nullable
public B getBuild(@Nonnull String name) {
return builds.get(name);
}
@Nonnull
public List<B> getBuilds() {
return new ArrayList<>(builds.values());
}
@Nonnull
public abstract I getMenuRepresentation();
@Nonnull
public String getName() {
return name;
}
@Nonnull
public UUID getUUID() {
return uuid;
}
@Nonnull
public abstract B newBuild(@Nonnull String name, @Nonnull L location);
public boolean removeBuild(@Nonnull String name) {
return builds.remove(name) != null;
}
public abstract void save(@Nonnull File file);
}
## Instruction:
Fix an issue with updating builds.
## Code After:
package com.campmongoose.serversaturday.common.submission;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public abstract class AbstractSubmitter<B extends AbstractBuild, I, L> {
protected final Map<String, B> builds = new HashMap<>();
@Nonnull
protected final String name;
@Nonnull
protected final UUID uuid;
protected AbstractSubmitter(@Nonnull String name, @Nonnull UUID uuid) {
this.name = name;
this.uuid = uuid;
}
@Nullable
public B getBuild(@Nonnull String name) {
return builds.get(name);
}
@Nonnull
public List<B> getBuilds() {
return new ArrayList<>(builds.values());
}
@Nonnull
public abstract I getMenuRepresentation();
@Nonnull
public String getName() {
return name;
}
@Nonnull
public UUID getUUID() {
return uuid;
}
@Nonnull
public abstract B newBuild(@Nonnull String name, @Nonnull L location);
public boolean removeBuild(@Nonnull String name) {
return builds.remove(name) != null;
}
public void renameBuild(String newName, B build) {
builds.remove(build.getName());
build.setName(newName);
builds.put(newName, build);
}
public abstract void save(@Nonnull File file);
}
|
# ... existing code ...
return builds.remove(name) != null;
}
public void renameBuild(String newName, B build) {
builds.remove(build.getName());
build.setName(newName);
builds.put(newName, build);
}
public abstract void save(@Nonnull File file);
}
# ... rest of the code ...
|
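The rename fix above works because the map is keyed by the build's name, so an entry has to be removed under its old key and re-inserted under the new one; a rough Python illustration of that re-keying pattern, independent of the ServerSaturday code:

builds = {'castle': {'name': 'castle', 'location': (10, 64, -3)}}

def rename_build(builds, old_name, new_name):
    # Pop the entry under the old name, update it, then reinsert under the new key.
    build = builds.pop(old_name)
    build['name'] = new_name
    builds[new_name] = build

rename_build(builds, 'castle', 'keep')
print(builds)  # {'keep': {'name': 'keep', 'location': (10, 64, -3)}}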
03cbdc9d568682564dbdd115ddce031aae1da6c5
|
runtime/src/launch/zebra/launch-zebra.c
|
runtime/src/launch/zebra/launch-zebra.c
|
static char* chpl_launch_create_command(int argc, char* argv[],
int32_t numLocales) {
int i;
int size;
char baseCommand[256];
char* command;
chpl_compute_real_binary_name(argv[0]);
sprintf(baseCommand, "zebra -fast -r %s", chpl_get_real_binary_name());
size = strlen(baseCommand) + 1;
for (i=1; i<argc; i++) {
size += strlen(argv[i]) + 3;
}
command = chpl_malloc(size, sizeof(char), CHPL_RT_MD_COMMAND_BUFFER, -1, "");
sprintf(command, "%s", baseCommand);
for (i=1; i<argc; i++) {
strcat(command, " '");
strcat(command, argv[i]);
strcat(command, "'");
}
if (strlen(command)+1 > size) {
chpl_internal_error("buffer overflow");
}
return command;
}
int chpl_launch(int argc, char* argv[], int32_t numLocales) {
return chpl_launch_using_system(chpl_launch_create_command(argc, argv, numLocales),
argv[0]);
}
int chpl_launch_handle_arg(int argc, char* argv[], int argNum,
int32_t lineno, chpl_string filename) {
return 0;
}
void chpl_launch_print_help(void) {
}
|
static char** chpl_launch_create_argv(int argc, char* argv[]) {
const int largc = 3;
char *largv[largc];
largv[0] = (char *) "zebra";
largv[1] = (char *) "-fast";
largv[2] = (char *) "-r";
return chpl_bundle_exec_args(argc, argv, largc, largv);
}
int chpl_launch(int argc, char* argv[], int32_t numLocales) {
if (numLocales != 1) {
// This error should be taken care of before we get to this point
chpl_internal_error("The XMT launcher only supports numLocales==1");
}
return chpl_launch_using_exec("zebra",
chpl_launch_create_argv(argc, argv),
argv[0]);
}
int chpl_launch_handle_arg(int argc, char* argv[], int argNum,
int32_t lineno, chpl_string filename) {
return 0;
}
void chpl_launch_print_help(void) {
}
|
Update the zebra launcher to use chpl_launch_using_exec() as implemented in r18010.
|
Update the zebra launcher to use chpl_launch_using_exec() as
implemented in r18010.
git-svn-id: 88467cb1fb04b8a755be7e1ee1026be4190196ef@18032 3a8e244f-b0f2-452b-bcba-4c88e055c3ca
|
C
|
apache-2.0
|
CoryMcCartan/chapel,CoryMcCartan/chapel,sungeunchoi/chapel,chizarlicious/chapel,sungeunchoi/chapel,CoryMcCartan/chapel,chizarlicious/chapel,hildeth/chapel,sungeunchoi/chapel,hildeth/chapel,CoryMcCartan/chapel,CoryMcCartan/chapel,CoryMcCartan/chapel,chizarlicious/chapel,chizarlicious/chapel,CoryMcCartan/chapel,hildeth/chapel,hildeth/chapel,hildeth/chapel,hildeth/chapel,hildeth/chapel,sungeunchoi/chapel,sungeunchoi/chapel,chizarlicious/chapel,chizarlicious/chapel,sungeunchoi/chapel,sungeunchoi/chapel,chizarlicious/chapel,sungeunchoi/chapel
|
c
|
## Code Before:
static char* chpl_launch_create_command(int argc, char* argv[],
int32_t numLocales) {
int i;
int size;
char baseCommand[256];
char* command;
chpl_compute_real_binary_name(argv[0]);
sprintf(baseCommand, "zebra -fast -r %s", chpl_get_real_binary_name());
size = strlen(baseCommand) + 1;
for (i=1; i<argc; i++) {
size += strlen(argv[i]) + 3;
}
command = chpl_malloc(size, sizeof(char), CHPL_RT_MD_COMMAND_BUFFER, -1, "");
sprintf(command, "%s", baseCommand);
for (i=1; i<argc; i++) {
strcat(command, " '");
strcat(command, argv[i]);
strcat(command, "'");
}
if (strlen(command)+1 > size) {
chpl_internal_error("buffer overflow");
}
return command;
}
int chpl_launch(int argc, char* argv[], int32_t numLocales) {
return chpl_launch_using_system(chpl_launch_create_command(argc, argv, numLocales),
argv[0]);
}
int chpl_launch_handle_arg(int argc, char* argv[], int argNum,
int32_t lineno, chpl_string filename) {
return 0;
}
void chpl_launch_print_help(void) {
}
## Instruction:
Update the zebra launcher to use chpl_launch_using_exec() as
implemented in r18010.
git-svn-id: 88467cb1fb04b8a755be7e1ee1026be4190196ef@18032 3a8e244f-b0f2-452b-bcba-4c88e055c3ca
## Code After:
static char** chpl_launch_create_argv(int argc, char* argv[]) {
const int largc = 3;
char *largv[largc];
largv[0] = (char *) "zebra";
largv[1] = (char *) "-fast";
largv[2] = (char *) "-r";
return chpl_bundle_exec_args(argc, argv, largc, largv);
}
int chpl_launch(int argc, char* argv[], int32_t numLocales) {
if (numLocales != 1) {
// This error should be taken care of before we get to this point
chpl_internal_error("The XMT launcher only supports numLocales==1");
}
return chpl_launch_using_exec("zebra",
chpl_launch_create_argv(argc, argv),
argv[0]);
}
int chpl_launch_handle_arg(int argc, char* argv[], int argNum,
int32_t lineno, chpl_string filename) {
return 0;
}
void chpl_launch_print_help(void) {
}
|
// ... existing code ...
static char** chpl_launch_create_argv(int argc, char* argv[]) {
const int largc = 3;
char *largv[largc];
largv[0] = (char *) "zebra";
largv[1] = (char *) "-fast";
largv[2] = (char *) "-r";
return chpl_bundle_exec_args(argc, argv, largc, largv);
}
int chpl_launch(int argc, char* argv[], int32_t numLocales) {
if (numLocales != 1) {
// This error should be taken care of before we get to this point
chpl_internal_error("The XMT launcher only supports numLocales==1");
}
return chpl_launch_using_exec("zebra",
chpl_launch_create_argv(argc, argv),
argv[0]);
}
// ... rest of the code ...
|
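A rough Python sketch of what bundling launcher arguments ahead of the user's command looks like conceptually; the real chpl_bundle_exec_args() and chpl_launch_using_exec() semantics are not shown in this record, so the prepend-and-exec behaviour below is only an assumption made for illustration:

def bundle_exec_args(argv, launcher_args):
    # Assumed behaviour: launcher arguments first, then the real binary and its arguments.
    return launcher_args + argv

argv = ['./a.out_real', '--n=100']
cmd = bundle_exec_args(argv, ['zebra', '-fast', '-r'])
print(cmd)  # ['zebra', '-fast', '-r', './a.out_real', '--n=100']
# os.execvp(cmd[0], cmd) would then hand control to the launcher, much like exec-based launching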
3fcdb9e64ef955fd0a7e5b2fda481d351dfb4d18
|
spotify/__init__.py
|
spotify/__init__.py
|
from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
|
from __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
|
Add NullHandler to the 'spotify' logger
|
Add NullHandler to the 'spotify' logger
|
Python
|
apache-2.0
|
jodal/pyspotify,mopidy/pyspotify,kotamat/pyspotify,jodal/pyspotify,kotamat/pyspotify,felix1m/pyspotify,mopidy/pyspotify,kotamat/pyspotify,felix1m/pyspotify,felix1m/pyspotify,jodal/pyspotify
|
python
|
## Code Before:
from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
## Instruction:
Add NullHandler to the 'spotify' logger
## Code After:
from __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
|
...
from __future__ import unicode_literals
import logging
import os
import weakref
...
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
...
|
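The NullHandler above only keeps the library quiet by default; an application embedding pyspotify still opts in to output on its side. A minimal sketch of that application-side configuration, using nothing beyond the standard logging module:

import logging

# The application decides where records from the 'spotify' logger go and at what level.
logging.basicConfig(level=logging.INFO)
logging.getLogger('spotify').setLevel(logging.DEBUG)

logging.getLogger('spotify.session').debug('now visible in the application log')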
2e5c830a46c415385d24a046f29f526ac4762b91
|
colplus-parser/src/test/java/org/col/parser/LanguageParserTest.java
|
colplus-parser/src/test/java/org/col/parser/LanguageParserTest.java
|
package org.col.parser;
import java.util.List;
import com.google.common.collect.Lists;
import org.junit.Test;
/**
*
*/
public class LanguageParserTest extends ParserTestBase<String> {
public LanguageParserTest() {
super(LanguageParser.PARSER);
}
@Test
public void parse() throws Exception {
assertParse("deu", "de");
assertParse("deu", "deu");
assertParse("deu", "german");
assertParse("deu", "deutsch");
assertParse("deu", "GER");
assertParse("eng", "en");
assertUnparsable("unknown");
assertUnparsable("zz");
}
@Override
List<String> additionalUnparsableValues() {
return Lists.newArrayList("term", "deuter");
}
}
|
package org.col.parser;
import java.util.List;
import com.google.common.collect.Lists;
import org.junit.Test;
/**
*
*/
public class LanguageParserTest extends ParserTestBase<String> {
public LanguageParserTest() {
super(LanguageParser.PARSER);
}
@Test
public void parse() throws Exception {
assertParse("deu", "de");
assertParse("deu", "deu");
assertParse("deu", "german");
assertParse("deu", "deutsch");
assertParse("deu", "GER");
assertParse("eng", "en");
for (String x : new String[]{"Limburgan", "Limburger", "Limburgish", "Lim", "li"}) {
assertParse("lim", x);
}
assertUnparsable("unknown");
assertUnparsable("zz");
}
@Override
List<String> additionalUnparsableValues() {
return Lists.newArrayList("term", "deuter");
}
}
|
Add more lang parse tests
|
Add more lang parse tests
|
Java
|
apache-2.0
|
Sp2000/colplus-backend
|
java
|
## Code Before:
package org.col.parser;
import java.util.List;
import com.google.common.collect.Lists;
import org.junit.Test;
/**
*
*/
public class LanguageParserTest extends ParserTestBase<String> {
public LanguageParserTest() {
super(LanguageParser.PARSER);
}
@Test
public void parse() throws Exception {
assertParse("deu", "de");
assertParse("deu", "deu");
assertParse("deu", "german");
assertParse("deu", "deutsch");
assertParse("deu", "GER");
assertParse("eng", "en");
assertUnparsable("unknown");
assertUnparsable("zz");
}
@Override
List<String> additionalUnparsableValues() {
return Lists.newArrayList("term", "deuter");
}
}
## Instruction:
Add more lang parse tests
## Code After:
package org.col.parser;
import java.util.List;
import com.google.common.collect.Lists;
import org.junit.Test;
/**
*
*/
public class LanguageParserTest extends ParserTestBase<String> {
public LanguageParserTest() {
super(LanguageParser.PARSER);
}
@Test
public void parse() throws Exception {
assertParse("deu", "de");
assertParse("deu", "deu");
assertParse("deu", "german");
assertParse("deu", "deutsch");
assertParse("deu", "GER");
assertParse("eng", "en");
for (String x : new String[]{"Limburgan", "Limburger", "Limburgish", "Lim", "li"}) {
assertParse("lim", x);
}
assertUnparsable("unknown");
assertUnparsable("zz");
}
@Override
List<String> additionalUnparsableValues() {
return Lists.newArrayList("term", "deuter");
}
}
|
# ... existing code ...
assertParse("deu", "deutsch");
assertParse("deu", "GER");
assertParse("eng", "en");
for (String x : new String[]{"Limburgan", "Limburger", "Limburgish", "Lim", "li"}) {
assertParse("lim", x);
}
assertUnparsable("unknown");
assertUnparsable("zz");
}
# ... rest of the code ...
|
5fa4ddaa1e4912fbfbb2db7b0807875b0b30504c
|
src/main/java/de/diesner/ehzlogger/EhzLogger.java
|
src/main/java/de/diesner/ehzlogger/EhzLogger.java
|
package de.diesner.ehzlogger;
import java.io.IOException;
import java.util.List;
import gnu.io.PortInUseException;
import gnu.io.UnsupportedCommOperationException;
import org.openmuc.jsml.structures.*;
import org.openmuc.jsml.tl.SML_SerialReceiver;
public class EhzLogger {
public static void main(String[] args) throws IOException, PortInUseException, UnsupportedCommOperationException {
System.setProperty("gnu.io.rxtx.SerialPorts", "/dev/ttyUSB0");
final SML_SerialReceiver receiver = new SML_SerialReceiver();
receiver.setupComPort("/dev/ttyUSB0");
Runtime.getRuntime().addShutdownHook(new Thread()
{
public void run()
{
try {
receiver.close();
} catch (IOException e) {
System.err.println("Error while trying to close serial port: " + e.getMessage());
}
}
});
CmdLinePrint cmdLinePrint = new CmdLinePrint();
while (true) {
SML_File smlFile = receiver.getSMLFile();
System.out.println("Got SML_File");
cmdLinePrint.messageReceived(smlFile.getMessages());
}
}
}
|
package de.diesner.ehzlogger;
import java.io.IOException;
import java.util.List;
import gnu.io.PortInUseException;
import gnu.io.UnsupportedCommOperationException;
import org.openmuc.jsml.structures.*;
import org.openmuc.jsml.tl.SML_SerialReceiver;
public class EhzLogger {
public static void main(String[] args) throws IOException, PortInUseException, UnsupportedCommOperationException {
System.setProperty("gnu.io.rxtx.SerialPorts", "/dev/ttyUSB0");
final SML_SerialReceiver receiver = new SML_SerialReceiver();
receiver.setupComPort("/dev/ttyUSB0");
Runtime.getRuntime().addShutdownHook(new Thread()
{
public void run()
{
try {
receiver.close();
} catch (IOException e) {
System.err.println("Error while trying to close serial port: " + e.getMessage());
}
}
});
SmlForwarder forwarder = new CmdLinePrint();
while (true) {
SML_File smlFile = receiver.getSMLFile();
System.out.println("Got SML_File");
forwarder.messageReceived(smlFile.getMessages());
}
}
}
|
Use interface in main to access SmlForwarder
|
Use interface in main to access SmlForwarder
|
Java
|
apache-2.0
|
adiesner/eHzLogger
|
java
|
## Code Before:
package de.diesner.ehzlogger;
import java.io.IOException;
import java.util.List;
import gnu.io.PortInUseException;
import gnu.io.UnsupportedCommOperationException;
import org.openmuc.jsml.structures.*;
import org.openmuc.jsml.tl.SML_SerialReceiver;
public class EhzLogger {
public static void main(String[] args) throws IOException, PortInUseException, UnsupportedCommOperationException {
System.setProperty("gnu.io.rxtx.SerialPorts", "/dev/ttyUSB0");
final SML_SerialReceiver receiver = new SML_SerialReceiver();
receiver.setupComPort("/dev/ttyUSB0");
Runtime.getRuntime().addShutdownHook(new Thread()
{
public void run()
{
try {
receiver.close();
} catch (IOException e) {
System.err.println("Error while trying to close serial port: " + e.getMessage());
}
}
});
CmdLinePrint cmdLinePrint = new CmdLinePrint();
while (true) {
SML_File smlFile = receiver.getSMLFile();
System.out.println("Got SML_File");
cmdLinePrint.messageReceived(smlFile.getMessages());
}
}
}
## Instruction:
Use interface in main to access SmlForwarder
## Code After:
package de.diesner.ehzlogger;
import java.io.IOException;
import java.util.List;
import gnu.io.PortInUseException;
import gnu.io.UnsupportedCommOperationException;
import org.openmuc.jsml.structures.*;
import org.openmuc.jsml.tl.SML_SerialReceiver;
public class EhzLogger {
public static void main(String[] args) throws IOException, PortInUseException, UnsupportedCommOperationException {
System.setProperty("gnu.io.rxtx.SerialPorts", "/dev/ttyUSB0");
final SML_SerialReceiver receiver = new SML_SerialReceiver();
receiver.setupComPort("/dev/ttyUSB0");
Runtime.getRuntime().addShutdownHook(new Thread()
{
public void run()
{
try {
receiver.close();
} catch (IOException e) {
System.err.println("Error while trying to close serial port: " + e.getMessage());
}
}
});
SmlForwarder forwarder = new CmdLinePrint();
while (true) {
SML_File smlFile = receiver.getSMLFile();
System.out.println("Got SML_File");
forwarder.messageReceived(smlFile.getMessages());
}
}
}
|
// ... existing code ...
}
});
SmlForwarder forwarder = new CmdLinePrint();
while (true) {
// ... modified code ...
SML_File smlFile = receiver.getSMLFile();
System.out.println("Got SML_File");
forwarder.messageReceived(smlFile.getMessages());
}
}
// ... rest of the code ...
|
c627504b77b68b133ce2cde73d192e4c40f436a5
|
TrailsKit/TrailsKit.h
|
TrailsKit/TrailsKit.h
|
//
// TrailsKit.h
// TrailsKit
//
// Created by Mike Mertsock on 1/1/13.
// Copyright (c) 2013 Esker Apps. All rights reserved.
//
#ifndef TrailsKit_TrailsKit_h
#define TrailsKit_TrailsKit_h
#import "TrailsKitGeometry.h"
#import "TKGPXPolylineMapper.h"
#import "TrailsKitUI.h"
#import "TrailsKitTypes.h"
#endif
|
//
// TrailsKit.h
// TrailsKit
//
// Created by Mike Mertsock on 1/1/13.
// Copyright (c) 2013 Esker Apps. All rights reserved.
//
#ifndef TrailsKit_TrailsKit_h
#define TrailsKit_TrailsKit_h
#import "TrailsKitGeometry.h"
#import "TrailsKitParsers.h"
#import "TrailsKitTypes.h"
#import "TrailsKitUI.h"
#endif
|
Add parsers header to root header
|
Add parsers header to root header
|
C
|
mit
|
mmertsock/TrailsKit
|
c
|
## Code Before:
//
// TrailsKit.h
// TrailsKit
//
// Created by Mike Mertsock on 1/1/13.
// Copyright (c) 2013 Esker Apps. All rights reserved.
//
#ifndef TrailsKit_TrailsKit_h
#define TrailsKit_TrailsKit_h
#import "TrailsKitGeometry.h"
#import "TKGPXPolylineMapper.h"
#import "TrailsKitUI.h"
#import "TrailsKitTypes.h"
#endif
## Instruction:
Add parsers header to root header
## Code After:
//
// TrailsKit.h
// TrailsKit
//
// Created by Mike Mertsock on 1/1/13.
// Copyright (c) 2013 Esker Apps. All rights reserved.
//
#ifndef TrailsKit_TrailsKit_h
#define TrailsKit_TrailsKit_h
#import "TrailsKitGeometry.h"
#import "TrailsKitParsers.h"
#import "TrailsKitTypes.h"
#import "TrailsKitUI.h"
#endif
|
// ... existing code ...
#define TrailsKit_TrailsKit_h
#import "TrailsKitGeometry.h"
#import "TrailsKitParsers.h"
#import "TrailsKitTypes.h"
#import "TrailsKitUI.h"
#endif
// ... rest of the code ...
|
8207d86b7b2a6e1f81454eefea4784d89c8674a8
|
resolver_test/django_test.py
|
resolver_test/django_test.py
|
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
|
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
|
Use different usernames for each test. by: Glenn, Giles
|
Use different usernames for each test. by: Glenn, Giles
|
Python
|
mit
|
pythonanywhere/resolver_test
|
python
|
## Code Before:
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
## Instruction:
Use different usernames for each test. by: Glenn, Giles
## Code After:
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
|
# ... existing code ...
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
# ... rest of the code ...
|
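An alternative way to get a fresh username per test, sketched here for comparison only (the repository keeps a counter as shown above); uuid4 avoids any shared mutable state between test cases:

import uuid

def unique_username(prefix='cherie'):
    # Each call yields something like 'cherie-3f9c1e2a', unique enough for test users.
    return '{}-{}'.format(prefix, uuid.uuid4().hex[:8])

print(unique_username())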
dbb668c3f72ab6d20abe08f9f23b7d66cfa0d8c3
|
ideascube/blog/forms.py
|
ideascube/blog/forms.py
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
Fix the blog content form
|
Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
python
|
## Code Before:
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
## Instruction:
Fix the blog content form
The form plays a trick on the 'text' field: it hides it, and creates a
nicer-looking 'content' field. When submitting the form, the content of
the 'content' field is injected into the 'text' field.
This works great, until Django 1.10.
With Django 1.10, the browser refuses to submit the form, saying the
'text' field is empty and required.
This is due to this hidden gem from the Django 1.10 release notes:
> * Required form fields now have the required HTML attribute. Set the
> Form.use_required_attribute attribute to False to disable it. You
> could also add the novalidate attribute to <form> if you don’t want
> browser validation.
https://docs.djangoproject.com/en/1.10/releases/1.10/#miscellaneous
This commit fixes the blog content form, which is the only one I found
problematic so far.
Hopefully it is the only one that is actually impacted. :-/
## Code After:
from django import forms
from ideascube.widgets import LangSelect
from .models import Content
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
widgets = {
# We need a normalized date string for JS datepicker, so we take
# control over the format to bypass L10N.
"published_at": forms.DateInput(format='%Y-%m-%d'),
"lang": LangSelect,
}
fields = "__all__"
def save(self, commit=True):
content = super().save()
content.save() # Index m2m.
return content
|
// ... existing code ...
class ContentForm(forms.ModelForm):
use_required_attribute = False
class Meta:
model = Content
// ... rest of the code ...
|
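To see concretely what the flag changes, a small sketch meant for a configured Django shell rather than the blog app itself; the DemoForm field names are made up for the example:

from django import forms

class DemoForm(forms.Form):
    text = forms.CharField()  # required by default

class NoRequiredAttrForm(DemoForm):
    # Same trick as ContentForm: keep server-side validation, drop the HTML attribute.
    use_required_attribute = False

print('required' in str(DemoForm()['text']))            # True  -> browser blocks empty submits
print('required' in str(NoRequiredAttrForm()['text']))  # False -> submit reaches the view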
2306478f67a93e27dd9d7d397f97e3641df3516a
|
ipython_startup.py
|
ipython_startup.py
|
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
|
from __future__ import division
from __future__ import absolute_import
import scipy as sp
import itertools as it
import functools as ft
import operator as op
import sys
import sympy
# Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.pyplot import subplots
from matplotlib.pyplot import show as pltshow
# and import some common functions into the global namespace
from scipy.linalg import norm
from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
from math import atan2, acos
from sympy import Rational as sRat
from sympy import pretty as spretty
|
Add lots of useful default imports to ipython
|
Add lots of useful default imports to ipython
|
Python
|
cc0-1.0
|
davidshepherd7/dotfiles,davidshepherd7/dotfiles,davidshepherd7/dotfiles,davidshepherd7/dotfiles,davidshepherd7/dotfiles
|
python
|
## Code Before:
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
## Instruction:
Add lots of useful default imports to ipython
## Code After:
from __future__ import division
from __future__ import absolute_import
import scipy as sp
import itertools as it
import functools as ft
import operator as op
import sys
import sympy
# Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.pyplot import subplots
from matplotlib.pyplot import show as pltshow
# and import some common functions into the global namespace
from scipy.linalg import norm
from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
from math import atan2, acos
from sympy import Rational as sRat
from sympy import pretty as spretty
|
# ... existing code ...
from __future__ import division
from __future__ import absolute_import
import scipy as sp
import itertools as it
import functools as ft
import operator as op
import sys
import sympy
# Plotting
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.pyplot import subplots
from matplotlib.pyplot import show as pltshow
# and import some common functions into the global namespace
from scipy.linalg import norm
from scipy import sin, cos, tan, log, pi, sqrt, exp, mean
from math import atan2, acos
from sympy import Rational as sRat
from sympy import pretty as spretty
# ... rest of the code ...
|
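As a usage note, IPython runs every .py file found in the active profile's startup directory, so a file like the one above typically lives under ~/.ipython/profile_default/startup/; a small sketch for checking what will be executed:

import os

startup_dir = os.path.expanduser('~/.ipython/profile_default/startup')
# Every script listed here runs, in name order, each time IPython starts with this profile.
print(sorted(os.listdir(startup_dir)))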
4a7b0fb482011400da0b3e760cde2d6f294d168f
|
sysrev/models.py
|
sysrev/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
|
from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
|
Add completed field to review
|
Add completed field to review
|
Python
|
mit
|
iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview
|
python
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
## Instruction:
Add completed field to review
## Code After:
from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
|
# ... existing code ...
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField()
# ... rest of the code ...
|
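A small usage sketch for the new flag, assuming a migration has been generated for the added column; the queryset variable names are illustrative only:

from sysrev.models import Review

# Reviews still in progress versus finished ones.
open_reviews = Review.objects.filter(completed=False)
finished_count = Review.objects.filter(completed=True).count()

# Marking a review as done.
review = open_reviews.first()
if review is not None:
    review.completed = True
    review.save(update_fields=['completed'])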
555637aa86bef0b3cf5d3fe67b0341bcee5e271a
|
findaconf/tests/test_autocomplete_routes.py
|
findaconf/tests/test_autocomplete_routes.py
|
from findaconf import app, db
from unittest import TestCase
from findaconf.tests.config import set_app, unset_app
class TestAutoCompleteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/autocomplete.py
def test_keywords(self):
url = '/autocomplete/keywords?query=sociology&limit=10'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places(self):
url = '/autocomplete/places?query=University%20of%20Essex'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
|
from findaconf import app, db
from unittest import TestCase
from findaconf.tests.config import set_app, unset_app
class TestAutoCompleteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/autocomplete.py
def test_keywords(self):
url = '/autocomplete/keywords?query=sociology&limit=10'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places(self):
url = '/autocomplete/places?query=University%20of%20Essex'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places_blank(self):
resp = self.app.get('/autocomplete/places?query=')
assert resp.status_code == 404
print resp.data
def test_google_places_wrong_proxy(self):
original_proxy = app.config['GOOGLE_PLACES_PROXY']
app.config['GOOGLE_PLACES_PROXY'] = 'http://python.org/ruby'
url = '/autocomplete/places?query=University'
resp = self.app.get(url)
assert resp.status_code == 404
app.config['GOOGLE_PLACES_PROXY'] = original_proxy
|
Add tests for 404 on invalid routes
|
Add tests for 404 on invalid routes
|
Python
|
mit
|
cuducos/findaconf,koorukuroo/findaconf,cuducos/findaconf,cuducos/findaconf,koorukuroo/findaconf,koorukuroo/findaconf
|
python
|
## Code Before:
from findaconf import app, db
from unittest import TestCase
from findaconf.tests.config import set_app, unset_app
class TestAutoCompleteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/autocomplete.py
def test_keywords(self):
url = '/autocomplete/keywords?query=sociology&limit=10'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places(self):
url = '/autocomplete/places?query=University%20of%20Essex'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
## Instruction:
Add tests for 404 on invalid routes
## Code After:
from findaconf import app, db
from unittest import TestCase
from findaconf.tests.config import set_app, unset_app
class TestAutoCompleteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/autocomplete.py
def test_keywords(self):
url = '/autocomplete/keywords?query=sociology&limit=10'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places(self):
url = '/autocomplete/places?query=University%20of%20Essex'
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places_blank(self):
resp = self.app.get('/autocomplete/places?query=')
assert resp.status_code == 404
print resp.data
def test_google_places_wrong_proxy(self):
original_proxy = app.config['GOOGLE_PLACES_PROXY']
app.config['GOOGLE_PLACES_PROXY'] = 'http://python.org/ruby'
url = '/autocomplete/places?query=University'
resp = self.app.get(url)
assert resp.status_code == 404
app.config['GOOGLE_PLACES_PROXY'] = original_proxy
|
# ... existing code ...
resp = self.app.get(url)
assert resp.status_code == 200
assert resp.mimetype == 'application/json'
def test_google_places_blank(self):
resp = self.app.get('/autocomplete/places?query=')
assert resp.status_code == 404
print resp.data
def test_google_places_wrong_proxy(self):
original_proxy = app.config['GOOGLE_PLACES_PROXY']
app.config['GOOGLE_PLACES_PROXY'] = 'http://python.org/ruby'
url = '/autocomplete/places?query=University'
resp = self.app.get(url)
assert resp.status_code == 404
app.config['GOOGLE_PLACES_PROXY'] = original_proxy
# ... rest of the code ...
|
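The wrong-proxy test above restores app.config by hand at the end; one common variation, sketched here and not taken from the repository, guards the override with try/finally so the original value comes back even when the assertion fails (this assumes the same TestCase fixtures as above):

def test_google_places_wrong_proxy(self):
    original_proxy = app.config['GOOGLE_PLACES_PROXY']
    app.config['GOOGLE_PLACES_PROXY'] = 'http://python.org/ruby'
    try:
        resp = self.app.get('/autocomplete/places?query=University')
        assert resp.status_code == 404
    finally:
        # Always restore the proxy so later tests see the real setting.
        app.config['GOOGLE_PLACES_PROXY'] = original_proxy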
9abc56ec060ec6366878a829fa9554cde28e8925
|
Settings/Tab.h
|
Settings/Tab.h
|
class UIContext;
/// <summary>
/// Abstract class that encapsulates functionality for dealing with
/// property sheet pages (tabs).
/// </summary>
class Tab {
public:
Tab();
~Tab();
/// <summary>Processes messages sent to the tab page.</summary>
virtual DLGPROC TabProc(
HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam);
/// <summary>Persists changes made on the tab page</summary>
virtual void SaveSettings() = 0;
protected:
HWND _hWnd;
UIContext *_ctxt;
/// <summary>
/// Performs intitialization for the tab page, similar to a constructor.
/// Since tab page windows are created on demand, this method could be
/// called much later than the constructor for the tab.
/// </summary>
virtual void Initialize() = 0;
/// <summary>Applies the current settings state to the tab page.</summary>
virtual void LoadSettings() = 0;
/// <summary>Handles WM_COMMAND messages.</summary>
/// <param name="nCode">Control-defined notification code</param>
/// <param name="ctrlId">Control identifier</param>
virtual DLGPROC Command(unsigned short nCode, unsigned short ctrlId) = 0;
/// <summary>Handles WM_NOTIFY messages.</summary>
/// <param name="nHdr">Notification header structure</param>
virtual DLGPROC Notification(NMHDR *nHdr) = 0;
};
|
class UIContext;
/// <summary>
/// Abstract class that encapsulates functionality for dealing with
/// property sheet pages (tabs).
/// </summary>
class Tab {
public:
Tab();
~Tab();
/// <summary>Processes messages sent to the tab page.</summary>
virtual DLGPROC TabProc(
HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam);
/// <summary>Persists changes made on the tab page</summary>
virtual void SaveSettings() = 0;
protected:
/// <summary>Window handle of this tab.</summary>
HWND _hWnd;
UIContext *_ctxt;
/// <summary>A vector of all the controls present on this tab.</summary>
std::vector<Control *> _controls;
/// <summary>
/// Performs intitialization for the tab page, similar to a constructor.
/// Since tab page windows are created on demand, this method could be
/// called much later than the constructor for the tab.
/// </summary>
virtual void Initialize() = 0;
/// <summary>Applies the current settings state to the tab page.</summary>
virtual void LoadSettings() = 0;
/// <summary>Handles WM_COMMAND messages.</summary>
/// <param name="nCode">Control-defined notification code</param>
/// <param name="ctrlId">Control identifier</param>
virtual DLGPROC Command(unsigned short nCode, unsigned short ctrlId) = 0;
/// <summary>Handles WM_NOTIFY messages.</summary>
/// <param name="nHdr">Notification header structure</param>
virtual DLGPROC Notification(NMHDR *nHdr) = 0;
};
|
Add vector to keep track of controls
|
Add vector to keep track of controls
|
C
|
bsd-2-clause
|
Soulflare3/3RVX,malensek/3RVX,Soulflare3/3RVX,malensek/3RVX,Soulflare3/3RVX,malensek/3RVX
|
c
|
## Code Before:
class UIContext;
/// <summary>
/// Abstract class that encapsulates functionality for dealing with
/// property sheet pages (tabs).
/// </summary>
class Tab {
public:
Tab();
~Tab();
/// <summary>Processes messages sent to the tab page.</summary>
virtual DLGPROC TabProc(
HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam);
/// <summary>Persists changes made on the tab page</summary>
virtual void SaveSettings() = 0;
protected:
HWND _hWnd;
UIContext *_ctxt;
/// <summary>
/// Performs intitialization for the tab page, similar to a constructor.
/// Since tab page windows are created on demand, this method could be
/// called much later than the constructor for the tab.
/// </summary>
virtual void Initialize() = 0;
/// <summary>Applies the current settings state to the tab page.</summary>
virtual void LoadSettings() = 0;
/// <summary>Handles WM_COMMAND messages.</summary>
/// <param name="nCode">Control-defined notification code</param>
/// <param name="ctrlId">Control identifier</param>
virtual DLGPROC Command(unsigned short nCode, unsigned short ctrlId) = 0;
/// <summary>Handles WM_NOTIFY messages.</summary>
/// <param name="nHdr">Notification header structure</param>
virtual DLGPROC Notification(NMHDR *nHdr) = 0;
};
## Instruction:
Add vector to keep track of controls
## Code After:
class UIContext;
/// <summary>
/// Abstract class that encapsulates functionality for dealing with
/// property sheet pages (tabs).
/// </summary>
class Tab {
public:
Tab();
~Tab();
/// <summary>Processes messages sent to the tab page.</summary>
virtual DLGPROC TabProc(
HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam);
/// <summary>Persists changes made on the tab page</summary>
virtual void SaveSettings() = 0;
protected:
/// <summary>Window handle of this tab.</summary>
HWND _hWnd;
UIContext *_ctxt;
/// <summary>A vector of all the controls present on this tab.</summary>
std::vector<Control *> _controls;
/// <summary>
/// Performs intitialization for the tab page, similar to a constructor.
/// Since tab page windows are created on demand, this method could be
/// called much later than the constructor for the tab.
/// </summary>
virtual void Initialize() = 0;
/// <summary>Applies the current settings state to the tab page.</summary>
virtual void LoadSettings() = 0;
/// <summary>Handles WM_COMMAND messages.</summary>
/// <param name="nCode">Control-defined notification code</param>
/// <param name="ctrlId">Control identifier</param>
virtual DLGPROC Command(unsigned short nCode, unsigned short ctrlId) = 0;
/// <summary>Handles WM_NOTIFY messages.</summary>
/// <param name="nHdr">Notification header structure</param>
virtual DLGPROC Notification(NMHDR *nHdr) = 0;
};
|
# ... existing code ...
virtual void SaveSettings() = 0;
protected:
/// <summary>Window handle of this tab.</summary>
HWND _hWnd;
UIContext *_ctxt;
/// <summary>A vector of all the controls present on this tab.</summary>
std::vector<Control *> _controls;
/// <summary>
/// Performs intitialization for the tab page, similar to a constructor.
# ... rest of the code ...
|
4c8787ba8d68f0e3fc83c38165abc3e1d4981c8c
|
buffer/src/main/java/io/atomix/catalyst/buffer/util/DirectMemoryAllocator.java
|
buffer/src/main/java/io/atomix/catalyst/buffer/util/DirectMemoryAllocator.java
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.catalyst.buffer.util;
/**
* Direct memory allocator.
*
* @author <a href="http://github.com/kuujo">Jordan Halterman</a>
*/
public class DirectMemoryAllocator implements MemoryAllocator<NativeMemory> {
@Override
public DirectMemory allocate(long size) {
DirectMemory memory = new DirectMemory(DirectMemory.UNSAFE.allocateMemory(size), size, this);
DirectMemory.UNSAFE.setMemory(memory.address(), size, (byte) 0);
return memory;
}
@Override
public DirectMemory reallocate(NativeMemory memory, long size) {
return new DirectMemory(DirectMemory.UNSAFE.reallocateMemory(memory.address(), size), size, this);
}
}
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.catalyst.buffer.util;
/**
* Direct memory allocator.
*
* @author <a href="http://github.com/kuujo">Jordan Halterman</a>
*/
public class DirectMemoryAllocator implements MemoryAllocator<NativeMemory> {
@Override
public DirectMemory allocate(long size) {
DirectMemory memory = new DirectMemory(DirectMemory.UNSAFE.allocateMemory(size), size, this);
DirectMemory.UNSAFE.setMemory(memory.address(), size, (byte) 0);
return memory;
}
@Override
public DirectMemory reallocate(NativeMemory memory, long size) {
DirectMemory newMemory = new DirectMemory(DirectMemory.UNSAFE.reallocateMemory(memory.address(), size), size, this);
if (newMemory.size() > memory.size()) {
DirectMemory.UNSAFE.setMemory(newMemory.address(), newMemory.size() - memory.size(), (byte) 0);
}
return newMemory;
}
}
|
Initialize reallocated unsafe direct memory to 0.
|
Initialize reallocated unsafe direct memory to 0.
|
Java
|
apache-2.0
|
atomix/catalyst,atomix/catalyst
|
java
|
## Code Before:
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.catalyst.buffer.util;
/**
* Direct memory allocator.
*
* @author <a href="http://github.com/kuujo">Jordan Halterman</a>
*/
public class DirectMemoryAllocator implements MemoryAllocator<NativeMemory> {
@Override
public DirectMemory allocate(long size) {
DirectMemory memory = new DirectMemory(DirectMemory.UNSAFE.allocateMemory(size), size, this);
DirectMemory.UNSAFE.setMemory(memory.address(), size, (byte) 0);
return memory;
}
@Override
public DirectMemory reallocate(NativeMemory memory, long size) {
return new DirectMemory(DirectMemory.UNSAFE.reallocateMemory(memory.address(), size), size, this);
}
}
## Instruction:
Initialize reallocated unsafe direct memory to 0.
## Code After:
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.catalyst.buffer.util;
/**
* Direct memory allocator.
*
* @author <a href="http://github.com/kuujo">Jordan Halterman</a>
*/
public class DirectMemoryAllocator implements MemoryAllocator<NativeMemory> {
@Override
public DirectMemory allocate(long size) {
DirectMemory memory = new DirectMemory(DirectMemory.UNSAFE.allocateMemory(size), size, this);
DirectMemory.UNSAFE.setMemory(memory.address(), size, (byte) 0);
return memory;
}
@Override
public DirectMemory reallocate(NativeMemory memory, long size) {
DirectMemory newMemory = new DirectMemory(DirectMemory.UNSAFE.reallocateMemory(memory.address(), size), size, this);
if (newMemory.size() > memory.size()) {
DirectMemory.UNSAFE.setMemory(newMemory.address(), newMemory.size() - memory.size(), (byte) 0);
}
return newMemory;
}
}
|
...
@Override
public DirectMemory reallocate(NativeMemory memory, long size) {
DirectMemory newMemory = new DirectMemory(DirectMemory.UNSAFE.reallocateMemory(memory.address(), size), size, this);
if (newMemory.size() > memory.size()) {
DirectMemory.UNSAFE.setMemory(newMemory.address(), newMemory.size() - memory.size(), (byte) 0);
}
return newMemory;
}
}
...
|
a81bfd86a95a235f56710f2e511856a3d80eaad1
|
src/test/java/org/jboss/loom/migrators/_ext/ExternalMigratorsLoaderTest.java
|
src/test/java/org/jboss/loom/migrators/_ext/ExternalMigratorsLoaderTest.java
|
package org.jboss.loom.migrators._ext;
import java.io.File;
import org.apache.commons.io.FileUtils;
import org.jboss.loom.conf.GlobalConfiguration;
import org.jboss.loom.utils.Utils;
import org.junit.Test;
/**
*
* @author Ondrej Zizka, ozizka at redhat.com
*/
public class ExternalMigratorsLoaderTest {
public ExternalMigratorsLoaderTest() {
}
@Test
public void testLoadMigrators() throws Exception {
System.out.println( "loadMigrators" );
File workDir = new File("target/extMigrators/");
FileUtils.forceMkdir( workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestMigrator.mig.xml", workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestJaxbBean.groovy", workDir );
new ExternalMigratorsLoader().loadMigrators( workDir, new GlobalConfiguration() );
}
}// class
|
package org.jboss.loom.migrators._ext;
import java.io.File;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.jboss.loom.conf.GlobalConfiguration;
import org.jboss.loom.utils.Utils;
import org.junit.Test;
/**
*
* @author Ondrej Zizka, ozizka at redhat.com
*/
public class ExternalMigratorsLoaderTest {
public ExternalMigratorsLoaderTest() {
}
@Test
public void testLoadMigrators() throws Exception {
System.out.println( "loadMigrators" );
File workDir = new File("target/extMigrators/");
FileUtils.forceMkdir( workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestMigrator.mig.xml", workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestJaxbBean.groovy", workDir );
Map<Class<? extends DefinitionBasedMigrator>, DefinitionBasedMigrator> migs
= new ExternalMigratorsLoader().loadMigrators( workDir, new GlobalConfiguration() );
for( Map.Entry<Class<? extends DefinitionBasedMigrator>, DefinitionBasedMigrator> entry : migs.entrySet() ) {
Class<? extends DefinitionBasedMigrator> cls = entry.getKey();
DefinitionBasedMigrator mig = entry.getValue();
System.out.println( String.format(" Loaded migrator %s: %s", cls.getName(), mig.toString() ) );
}
}
}// class
|
Improve ext migr loading test
|
Improve ext migr loading test
|
Java
|
apache-2.0
|
OndraZizka/jboss-migration,OndraZizka/jboss-migration,OndraZizka/jboss-migration
|
java
|
## Code Before:
package org.jboss.loom.migrators._ext;
import java.io.File;
import org.apache.commons.io.FileUtils;
import org.jboss.loom.conf.GlobalConfiguration;
import org.jboss.loom.utils.Utils;
import org.junit.Test;
/**
*
* @author Ondrej Zizka, ozizka at redhat.com
*/
public class ExternalMigratorsLoaderTest {
public ExternalMigratorsLoaderTest() {
}
@Test
public void testLoadMigrators() throws Exception {
System.out.println( "loadMigrators" );
File workDir = new File("target/extMigrators/");
FileUtils.forceMkdir( workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestMigrator.mig.xml", workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestJaxbBean.groovy", workDir );
new ExternalMigratorsLoader().loadMigrators( workDir, new GlobalConfiguration() );
}
}// class
## Instruction:
Improve ext migr loading test
## Code After:
package org.jboss.loom.migrators._ext;
import java.io.File;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.jboss.loom.conf.GlobalConfiguration;
import org.jboss.loom.utils.Utils;
import org.junit.Test;
/**
*
* @author Ondrej Zizka, ozizka at redhat.com
*/
public class ExternalMigratorsLoaderTest {
public ExternalMigratorsLoaderTest() {
}
@Test
public void testLoadMigrators() throws Exception {
System.out.println( "loadMigrators" );
File workDir = new File("target/extMigrators/");
FileUtils.forceMkdir( workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestMigrator.mig.xml", workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestJaxbBean.groovy", workDir );
Map<Class<? extends DefinitionBasedMigrator>, DefinitionBasedMigrator> migs
= new ExternalMigratorsLoader().loadMigrators( workDir, new GlobalConfiguration() );
for( Map.Entry<Class<? extends DefinitionBasedMigrator>, DefinitionBasedMigrator> entry : migs.entrySet() ) {
Class<? extends DefinitionBasedMigrator> cls = entry.getKey();
DefinitionBasedMigrator mig = entry.getValue();
System.out.println( String.format(" Loaded migrator %s: %s", cls.getName(), mig.toString() ) );
}
}
}// class
|
...
package org.jboss.loom.migrators._ext;
import java.io.File;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.jboss.loom.conf.GlobalConfiguration;
import org.jboss.loom.utils.Utils;
...
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestMigrator.mig.xml", workDir );
Utils.copyResourceToDir( ExternalMigratorsLoader.class, "TestJaxbBean.groovy", workDir );
Map<Class<? extends DefinitionBasedMigrator>, DefinitionBasedMigrator> migs
= new ExternalMigratorsLoader().loadMigrators( workDir, new GlobalConfiguration() );
for( Map.Entry<Class<? extends DefinitionBasedMigrator>, DefinitionBasedMigrator> entry : migs.entrySet() ) {
Class<? extends DefinitionBasedMigrator> cls = entry.getKey();
DefinitionBasedMigrator mig = entry.getValue();
System.out.println( String.format(" Loaded migrator %s: %s", cls.getName(), mig.toString() ) );
}
}
...
|
2b3e281c228a4efa9483362f10eac74ce4da6178
|
parliament/legacy_urls.py
|
parliament/legacy_urls.py
|
from django.conf.urls import url
from parliament.core.utils import redir_view
from parliament.hansards.redirect_views import hansard_redirect
urlpatterns = [
url(r'^hansards/$', redir_view('parliament.hansards.views.index')),
url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')),
url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
]
|
from django.conf.urls import url
from parliament.core.utils import redir_view
from parliament.hansards.redirect_views import hansard_redirect
urlpatterns = [
url(r'^hansards/$', redir_view('debates')),
url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')),
url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
]
|
Fix a couple of redirect URLs
|
Fix a couple of redirect URLs
|
Python
|
agpl-3.0
|
litui/openparliament,litui/openparliament,rhymeswithcycle/openparliament,rhymeswithcycle/openparliament,rhymeswithcycle/openparliament,litui/openparliament
|
python
|
## Code Before:
from django.conf.urls import url
from parliament.core.utils import redir_view
from parliament.hansards.redirect_views import hansard_redirect
urlpatterns = [
url(r'^hansards/$', redir_view('parliament.hansards.views.index')),
url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('parliament.hansards.views.by_year')),
url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
]
## Instruction:
Fix a couple of redirect URLs
## Code After:
from django.conf.urls import url
from parliament.core.utils import redir_view
from parliament.hansards.redirect_views import hansard_redirect
urlpatterns = [
url(r'^hansards/$', redir_view('debates')),
url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')),
url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/(?P<sequence>\d+)/(?P<only>only|permalink)/$', hansard_redirect),
]
|
...
from parliament.hansards.redirect_views import hansard_redirect
urlpatterns = [
url(r'^hansards/$', redir_view('debates')),
url(r'^hansards/year/(?P<year>\d{4})/$', redir_view('debates_by_year')),
url(r'^hansards/(?P<hansard_id>\d+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_date>[0-9-]+)/$', hansard_redirect),
url(r'^hansards/(?P<hansard_id>\d+)/(?P<sequence>\d+)/$', hansard_redirect),
...
|
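A minimal sketch of the name-based redirect style this record moves to, shown with Django's stock RedirectView instead of the project-specific redir_view helper (whose implementation is not part of this record); the pattern names 'debates' and 'debates_by_year' are taken from the record, everything else is an assumption:

# Hypothetical sketch only -- the real project routes this through its own redir_view helper.
from django.conf.urls import url
from django.views.generic import RedirectView

urlpatterns = [
    url(r'^hansards/$',
        RedirectView.as_view(pattern_name='debates', permanent=True)),
    # Captured kwargs such as year are passed on to reverse() by RedirectView.
    url(r'^hansards/year/(?P<year>\d{4})/$',
        RedirectView.as_view(pattern_name='debates_by_year', permanent=True)),
]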
e39925db2834a7491f9b8b505e1e1cf181840035
|
clowder_server/views.py
|
clowder_server/views.py
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Alert, Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
frequency = request.POST.get('frequency')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', '[email protected]',
['[email protected]'], fail_silently=False)
if frequency:
expiration_date = datetime.datetime.now() + int(frequency)
Alert.objects.filter(name=name).delete()
Alert.objects.create(
name=name,
expire_at=expiration_date
)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name', 'create')
return context
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Alert, Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
frequency = request.POST.get('frequency')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', '[email protected]',
['[email protected]'], fail_silently=False)
if frequency:
expiration_date = datetime.datetime.now() + int(frequency)
Alert.objects.filter(name=name).delete()
Alert.objects.create(
name=name,
expire_at=expiration_date
)
return HttpResponse('test')
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name', 'create')
return context
|
Add test response to frequency
|
Add test response to frequency
|
Python
|
agpl-3.0
|
framewr/clowder_server,framewr/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server
|
python
|
## Code Before:
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Alert, Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
frequency = request.POST.get('frequency')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', '[email protected]',
['[email protected]'], fail_silently=False)
if frequency:
expiration_date = datetime.datetime.now() + int(frequency)
Alert.objects.filter(name=name).delete()
Alert.objects.create(
name=name,
expire_at=expiration_date
)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name', 'create')
return context
## Instruction:
Add test response to frequency
## Code After:
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Alert, Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
frequency = request.POST.get('frequency')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', '[email protected]',
['[email protected]'], fail_silently=False)
if frequency:
expiration_date = datetime.datetime.now() + int(frequency)
Alert.objects.filter(name=name).delete()
Alert.objects.create(
name=name,
expire_at=expiration_date
)
return HttpResponse('test')
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name', 'create')
return context
|
# ... existing code ...
name=name,
expire_at=expiration_date
)
return HttpResponse('test')
Ping.objects.create(
name=name,
# ... rest of the code ...
|
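One thing worth spelling out about the frequency handling above: the expiration arithmetic assumes a datetime import that is not part of the shown module, and a plain int cannot be added to a datetime object directly; a minimal sketch of how that computation is usually written, with the 300-second frequency as an assumed example value:

import datetime

frequency = '300'  # assumed example: seconds, as it would arrive in request.POST
expiration_date = datetime.datetime.now() + datetime.timedelta(seconds=int(frequency))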
f0898d02f76cbd77206b6ef278b1da9721b7cd3d
|
unix/sigtables.h
|
unix/sigtables.h
|
typedef struct {
int signal;
char *name;
} Signal;
typedef struct {
int len;
int items[];
} SignalVector;
extern const Signal signals[];
extern const int nsigs;
extern int max_signum;
#define SIGOFFSET(SIG) ((SIG) - 1)
void
InitSignalTables (void);
SignalVector*
CreateSignalVector (void);
void
FreeSignalVector(
SignalVector *svPtr
);
int
GetSignalIdFromObj (
Tcl_Interp *interp,
Tcl_Obj *nameObj
);
#define __POSIX_SIGNAL_SIGTABLES_H
#endif /* __POSIX_SIGNAL_SIGTABLES_H */
/* vim: set ts=8 sts=4 sw=4 sts=4 noet: */
|
typedef struct {
int signal;
char *name;
} Signal;
typedef struct {
int len;
int items[];
} SignalVector;
MODULE_SCOPE const Signal signals[];
MODULE_SCOPE const int nsigs;
MODULE_SCOPE int max_signum;
#define SIGOFFSET(SIG) ((SIG) - 1)
void
InitSignalTables (void);
SignalVector*
CreateSignalVector (void);
void
FreeSignalVector(
SignalVector *svPtr
);
int
GetSignalIdFromObj (
Tcl_Interp *interp,
Tcl_Obj *nameObj
);
#define __POSIX_SIGNAL_SIGTABLES_H
#endif /* __POSIX_SIGNAL_SIGTABLES_H */
/* vim: set ts=8 sts=4 sw=4 sts=4 noet: */
|
Hide global data symbols related to signal tables
|
Hide global data symbols related to signal tables
'extern' decls for "signals", "nsigs" and "max_signum"
changed to 'MODULE_SCOPE' in unix/sigtables.h
to remove them from the symbol table of the generated
shared object file.
|
C
|
mit
|
kostix/posix-signal
|
c
|
## Code Before:
typedef struct {
int signal;
char *name;
} Signal;
typedef struct {
int len;
int items[];
} SignalVector;
extern const Signal signals[];
extern const int nsigs;
extern int max_signum;
#define SIGOFFSET(SIG) ((SIG) - 1)
void
InitSignalTables (void);
SignalVector*
CreateSignalVector (void);
void
FreeSignalVector(
SignalVector *svPtr
);
int
GetSignalIdFromObj (
Tcl_Interp *interp,
Tcl_Obj *nameObj
);
#define __POSIX_SIGNAL_SIGTABLES_H
#endif /* __POSIX_SIGNAL_SIGTABLES_H */
/* vim: set ts=8 sts=4 sw=4 sts=4 noet: */
## Instruction:
Hide global data symbols related to signal tables
'extern' decls for "signals", "nsigs" and "max_signum"
changed to 'MODULE_SCOPE' in unix/sigtables.h
to remove them from the symbol table of the generated
shared object file.
## Code After:
typedef struct {
int signal;
char *name;
} Signal;
typedef struct {
int len;
int items[];
} SignalVector;
MODULE_SCOPE const Signal signals[];
MODULE_SCOPE const int nsigs;
MODULE_SCOPE int max_signum;
#define SIGOFFSET(SIG) ((SIG) - 1)
void
InitSignalTables (void);
SignalVector*
CreateSignalVector (void);
void
FreeSignalVector(
SignalVector *svPtr
);
int
GetSignalIdFromObj (
Tcl_Interp *interp,
Tcl_Obj *nameObj
);
#define __POSIX_SIGNAL_SIGTABLES_H
#endif /* __POSIX_SIGNAL_SIGTABLES_H */
/* vim: set ts=8 sts=4 sw=4 sts=4 noet: */
|
# ... existing code ...
int items[];
} SignalVector;
MODULE_SCOPE const Signal signals[];
MODULE_SCOPE const int nsigs;
MODULE_SCOPE int max_signum;
#define SIGOFFSET(SIG) ((SIG) - 1)
# ... rest of the code ...
|
338c08d0ee8f3937e9c7388f79f06531f85eb554
|
networks/src/main/kotlin/org/walleth/data/networks/all/NetworkDefinition5.kt
|
networks/src/main/kotlin/org/walleth/data/networks/all/NetworkDefinition5.kt
|
package org.walleth.data.networks.all
import org.kethereum.model.ChainDefinition
import org.kethereum.model.ChainId
import org.walleth.data.networks.NetworkDefinition
class NetworkDefinition5 : NetworkDefinition {
override val tokenShortName = "GOR"
override val tokenName = "Goerli Ether"
override val chain = ChainDefinition(ChainId(5L), tokenShortName)
override fun getNetworkName() = "goerli"
override val infoUrl = "https://goerli.net/#about"
override val faucets = listOf(
"https://goerli-faucet.slock.it/?address=%address%"
)
override val rpcEndpoints = listOf("https://rpc.goerli.mudit.blog/",
"https://rpc.slock.it/goerli ",
"https://goerli.prylabs.net/"
)
}
|
package org.walleth.data.networks.all
import org.kethereum.model.ChainDefinition
import org.kethereum.model.ChainId
import org.walleth.data.networks.NetworkDefinition
class NetworkDefinition5 : NetworkDefinition {
override val tokenShortName = "GOR"
override val tokenName = "Goerli Ether"
override val chain = ChainDefinition(ChainId(5L), tokenShortName)
override fun getNetworkName() = "goerli"
override val infoUrl = "https://goerli.net/#about"
override val faucets = listOf(
"https://goerli-faucet.slock.it/?address=%address%"
)
override val rpcEndpoints = listOf(
"https://rpc.slock.it/goerli ",
"https://goerli.prylabs.net",
"https://rpc.goerli.mudit.blog"
)
}
|
Sort by measurement results from @DaveAppleton
|
Sort by measurement results from @DaveAppleton
see https://github.com/DaveAppleton/goerli_test
|
Kotlin
|
mit
|
walleth/kethereum
|
kotlin
|
## Code Before:
package org.walleth.data.networks.all
import org.kethereum.model.ChainDefinition
import org.kethereum.model.ChainId
import org.walleth.data.networks.NetworkDefinition
class NetworkDefinition5 : NetworkDefinition {
override val tokenShortName = "GOR"
override val tokenName = "Goerli Ether"
override val chain = ChainDefinition(ChainId(5L), tokenShortName)
override fun getNetworkName() = "goerli"
override val infoUrl = "https://goerli.net/#about"
override val faucets = listOf(
"https://goerli-faucet.slock.it/?address=%address%"
)
override val rpcEndpoints = listOf("https://rpc.goerli.mudit.blog/",
"https://rpc.slock.it/goerli ",
"https://goerli.prylabs.net/"
)
}
## Instruction:
Sort by measurement results from @DaveAppleton
see https://github.com/DaveAppleton/goerli_test
## Code After:
package org.walleth.data.networks.all
import org.kethereum.model.ChainDefinition
import org.kethereum.model.ChainId
import org.walleth.data.networks.NetworkDefinition
class NetworkDefinition5 : NetworkDefinition {
override val tokenShortName = "GOR"
override val tokenName = "Goerli Ether"
override val chain = ChainDefinition(ChainId(5L), tokenShortName)
override fun getNetworkName() = "goerli"
override val infoUrl = "https://goerli.net/#about"
override val faucets = listOf(
"https://goerli-faucet.slock.it/?address=%address%"
)
override val rpcEndpoints = listOf(
"https://rpc.slock.it/goerli ",
"https://goerli.prylabs.net",
"https://rpc.goerli.mudit.blog"
)
}
|
# ... existing code ...
"https://goerli-faucet.slock.it/?address=%address%"
)
override val rpcEndpoints = listOf(
"https://rpc.slock.it/goerli ",
"https://goerli.prylabs.net",
"https://rpc.goerli.mudit.blog"
)
}
# ... rest of the code ...
|
ab734ec5a64b364fcf8aff3d91eea860887afc42
|
onnxruntime/core/framework/tensor_external_data_info.h
|
onnxruntime/core/framework/tensor_external_data_info.h
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#pragma once
#include <string>
#include "core/common/status.h"
#include "core/graph/onnx_protobuf.h"
#include "core/session/onnxruntime_c_api.h"
namespace onnxruntime {
class ExternalDataInfo {
private:
std::basic_string<ORTCHAR_T> rel_path_;
//-1 means doesn't exist
ptrdiff_t offset_;
//-1 means doesn't exist
ptrdiff_t length_;
std::string checksum_;
public:
const std::basic_string<ORTCHAR_T>& GetRelPath() const { return rel_path_; }
ptrdiff_t GetOffset() const { return offset_; }
ptrdiff_t GetLength() const { return length_; }
const std::string& GetChecksum() const { return checksum_; }
  // If the value of 'offset' or 'length' field is larger than the max value of ssize_t, this function will treat it as a
// wrong value and return FAIL.
static common::Status Create(const ::google::protobuf::RepeatedPtrField<::ONNX_NAMESPACE::StringStringEntryProto>& input,
std::unique_ptr<ExternalDataInfo>& out);
};
} // namespace onnxruntime
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#pragma once
#include <string>
#include "core/common/status.h"
#include "core/graph/onnx_protobuf.h"
#include "core/session/onnxruntime_c_api.h"
namespace onnxruntime {
class ExternalDataInfo {
private:
std::basic_string<ORTCHAR_T> rel_path_;
ptrdiff_t offset_ = 0;
ptrdiff_t length_ = 0;
std::string checksum_;
public:
const std::basic_string<ORTCHAR_T>& GetRelPath() const { return rel_path_; }
ptrdiff_t GetOffset() const { return offset_; }
ptrdiff_t GetLength() const { return length_; }
const std::string& GetChecksum() const { return checksum_; }
  // If the value of 'offset' or 'length' field is larger than the max value of ssize_t, this function will treat it as a
// wrong value and return FAIL.
static common::Status Create(const ::google::protobuf::RepeatedPtrField<::ONNX_NAMESPACE::StringStringEntryProto>& input,
std::unique_ptr<ExternalDataInfo>& out);
};
} // namespace onnxruntime
|
Fix a bug in ExternalDataInfo
|
Fix a bug in ExternalDataInfo
|
C
|
mit
|
microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime,microsoft/onnxruntime
|
c
|
## Code Before:
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#pragma once
#include <string>
#include "core/common/status.h"
#include "core/graph/onnx_protobuf.h"
#include "core/session/onnxruntime_c_api.h"
namespace onnxruntime {
class ExternalDataInfo {
private:
std::basic_string<ORTCHAR_T> rel_path_;
//-1 means doesn't exist
ptrdiff_t offset_;
//-1 means doesn't exist
ptrdiff_t length_;
std::string checksum_;
public:
const std::basic_string<ORTCHAR_T>& GetRelPath() const { return rel_path_; }
ptrdiff_t GetOffset() const { return offset_; }
ptrdiff_t GetLength() const { return length_; }
const std::string& GetChecksum() const { return checksum_; }
  // If the value of 'offset' or 'length' field is larger than the max value of ssize_t, this function will treat it as a
// wrong value and return FAIL.
static common::Status Create(const ::google::protobuf::RepeatedPtrField<::ONNX_NAMESPACE::StringStringEntryProto>& input,
std::unique_ptr<ExternalDataInfo>& out);
};
} // namespace onnxruntime
## Instruction:
Fix a bug in ExternalDataInfo
## Code After:
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#pragma once
#include <string>
#include "core/common/status.h"
#include "core/graph/onnx_protobuf.h"
#include "core/session/onnxruntime_c_api.h"
namespace onnxruntime {
class ExternalDataInfo {
private:
std::basic_string<ORTCHAR_T> rel_path_;
ptrdiff_t offset_ = 0;
ptrdiff_t length_ = 0;
std::string checksum_;
public:
const std::basic_string<ORTCHAR_T>& GetRelPath() const { return rel_path_; }
ptrdiff_t GetOffset() const { return offset_; }
ptrdiff_t GetLength() const { return length_; }
const std::string& GetChecksum() const { return checksum_; }
  // If the value of 'offset' or 'length' field is larger than the max value of ssize_t, this function will treat it as a
// wrong value and return FAIL.
static common::Status Create(const ::google::protobuf::RepeatedPtrField<::ONNX_NAMESPACE::StringStringEntryProto>& input,
std::unique_ptr<ExternalDataInfo>& out);
};
} // namespace onnxruntime
|
// ... existing code ...
class ExternalDataInfo {
private:
std::basic_string<ORTCHAR_T> rel_path_;
ptrdiff_t offset_ = 0;
ptrdiff_t length_ = 0;
std::string checksum_;
public:
// ... rest of the code ...
|
a5f8b0c02ea53a5f99671eff071a76d68d117a2e
|
tools/hardware.h
|
tools/hardware.h
|
// For disk/drive status
#define HW_NODRIVE 0
#define HW_NODISK 1
#define HW_HAVEDISK 2
// Drive geometry
#define HW_MAXHEADS 2
#define HW_MAXTRACKS 80
#define HW_NORMALSTEPPING 1
#define HW_DOUBLESTEPPING 2
extern int hw_currenttrack;
extern int hw_currenthead;
extern int hw_stepping;
// Initialisation
extern int hw_init();
// Drive control
extern unsigned char hw_detectdisk();
extern void hw_driveselect();
extern void hw_startmotor();
extern void hw_stopmotor();
// Track seeking
extern int hw_attrackzero();
extern void hw_seektotrackzero();
extern void hw_sideselect(const int side);
// Signaling and data sampling
extern int hw_writeprotected();
extern void hw_samplerawtrackdata(char* buf, uint32_t len);
// Clean up
extern void hw_done();
#endif
|
// For disk/drive status
#define HW_NODRIVE 0
#define HW_NODISK 1
#define HW_HAVEDISK 2
// Drive geometry
#define HW_MAXHEADS 2
#define HW_MAXTRACKS 80
#define HW_NORMALSTEPPING 1
#define HW_DOUBLESTEPPING 2
extern int hw_currenttrack;
extern int hw_currenthead;
extern int hw_stepping;
// Initialisation
extern int hw_init();
// Drive control
extern unsigned char hw_detectdisk();
extern void hw_driveselect();
extern void hw_startmotor();
extern void hw_stopmotor();
// Track seeking
extern int hw_attrackzero();
extern void hw_seektotrackzero();
extern void hw_seektotrack(int track);
extern void hw_sideselect(const int side);
// Signaling and data sampling
extern void hw_waitforindex();
extern int hw_writeprotected();
extern void hw_samplerawtrackdata(char* buf, uint32_t len);
// Clean up
extern void hw_done();
#endif
|
Add externs for the functions used
|
Add externs for the functions used
|
C
|
mit
|
picosonic/bbc-fdc,picosonic/bbc-fdc
|
c
|
## Code Before:
// For disk/drive status
#define HW_NODRIVE 0
#define HW_NODISK 1
#define HW_HAVEDISK 2
// Drive geometry
#define HW_MAXHEADS 2
#define HW_MAXTRACKS 80
#define HW_NORMALSTEPPING 1
#define HW_DOUBLESTEPPING 2
extern int hw_currenttrack;
extern int hw_currenthead;
extern int hw_stepping;
// Initialisation
extern int hw_init();
// Drive control
extern unsigned char hw_detectdisk();
extern void hw_driveselect();
extern void hw_startmotor();
extern void hw_stopmotor();
// Track seeking
extern int hw_attrackzero();
extern void hw_seektotrackzero();
extern void hw_sideselect(const int side);
// Signaling and data sampling
extern int hw_writeprotected();
extern void hw_samplerawtrackdata(char* buf, uint32_t len);
// Clean up
extern void hw_done();
#endif
## Instruction:
Add externs for the functions used
## Code After:
// For disk/drive status
#define HW_NODRIVE 0
#define HW_NODISK 1
#define HW_HAVEDISK 2
// Drive geometry
#define HW_MAXHEADS 2
#define HW_MAXTRACKS 80
#define HW_NORMALSTEPPING 1
#define HW_DOUBLESTEPPING 2
extern int hw_currenttrack;
extern int hw_currenthead;
extern int hw_stepping;
// Initialisation
extern int hw_init();
// Drive control
extern unsigned char hw_detectdisk();
extern void hw_driveselect();
extern void hw_startmotor();
extern void hw_stopmotor();
// Track seeking
extern int hw_attrackzero();
extern void hw_seektotrackzero();
extern void hw_seektotrack(int track);
extern void hw_sideselect(const int side);
// Signaling and data sampling
extern void hw_waitforindex();
extern int hw_writeprotected();
extern void hw_samplerawtrackdata(char* buf, uint32_t len);
// Clean up
extern void hw_done();
#endif
|
// ... existing code ...
// Track seeking
extern int hw_attrackzero();
extern void hw_seektotrackzero();
extern void hw_seektotrack(int track);
extern void hw_sideselect(const int side);
// Signaling and data sampling
extern void hw_waitforindex();
extern int hw_writeprotected();
extern void hw_samplerawtrackdata(char* buf, uint32_t len);
// ... rest of the code ...
|
95da47010839da430223700345e07078b2157131
|
evewspace/account/models.py
|
evewspace/account/models.py
|
from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
|
from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
|
Add PlayTime class and tie it to user profiles
|
Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times.
|
Python
|
apache-2.0
|
evewspace/eve-wspace,Maarten28/eve-wspace,proycon/eve-wspace,gpapaz/eve-wspace,Unsettled/eve-wspace,acdervis/eve-wspace,Unsettled/eve-wspace,marbindrakon/eve-wspace,hybrid1969/eve-wspace,Unsettled/eve-wspace,acdervis/eve-wspace,marbindrakon/eve-wspace,Maarten28/eve-wspace,mmalyska/eve-wspace,acdervis/eve-wspace,nyrocron/eve-wspace,nyrocron/eve-wspace,acdervis/eve-wspace,gpapaz/eve-wspace,marbindrakon/eve-wspace,hybrid1969/eve-wspace,Zumochi/eve-wspace,Maarten28/eve-wspace,mmalyska/eve-wspace,evewspace/eve-wspace,Zumochi/eve-wspace,proycon/eve-wspace,hybrid1969/eve-wspace,nyrocron/eve-wspace,nyrocron/eve-wspace,mmalyska/eve-wspace,marbindrakon/eve-wspace,evewspace/eve-wspace,Maarten28/eve-wspace,evewspace/eve-wspace,mmalyska/eve-wspace,gpapaz/eve-wspace,proycon/eve-wspace,proycon/eve-wspace,Zumochi/eve-wspace,hybrid1969/eve-wspace,Unsettled/eve-wspace,gpapaz/eve-wspace,Zumochi/eve-wspace
|
python
|
## Code Before:
from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
## Instruction:
Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times.
## Code After:
from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
|
...
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
...
|
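A short usage sketch for the new model, showing how a PlayTime is attached to a profile through the playtimes ManyToManyField; the import path mirrors the file location in this record and all values are illustrative assumptions:

import datetime
from evewspace.account.models import PlayTime, UserProfile  # assumed import path

playtime = PlayTime.objects.create(fromtime=datetime.time(18, 0),
                                   totime=datetime.time(23, 30))
profile = UserProfile.objects.get(user=some_user)  # some_user: an assumed existing auth User
profile.playtimes.add(playtime)
evening_players = UserProfile.objects.filter(playtimes=playtime)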
ba722635f13350c4b1e04aeab0838c923deb1985
|
feeds/middlewares.py
|
feeds/middlewares.py
|
import logging
from scrapy.spidermiddlewares.httperror import HttpError
logger = logging.getLogger(__name__)
class FeedsHttpErrorMiddleware:
@classmethod
def from_crawler(cls, crawler):
return cls()
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
logger.warning(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
|
import logging
from scrapy.spidermiddlewares.httperror import HttpError
logger = logging.getLogger(__name__)
class FeedsHttpErrorMiddleware:
@classmethod
def from_crawler(cls, crawler):
return cls()
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
if response.status in [500, 502, 503, 504]:
# These status codes are usually induced by overloaded sites,
# updates, short downtimes, etc. and are not that relevant.
lgr = logger.info
else:
lgr = logger.warning
lgr(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
|
Use log level info for HTTP statuses 500, 502, 503, 504.
|
Use log level info for HTTP statuses 500, 502, 503, 504.
These status codes are usually induced by overloaded sites, updates, short
downtimes, etc. and are not that relevant.
|
Python
|
agpl-3.0
|
Lukas0907/feeds,Lukas0907/feeds,nblock/feeds,nblock/feeds
|
python
|
## Code Before:
import logging
from scrapy.spidermiddlewares.httperror import HttpError
logger = logging.getLogger(__name__)
class FeedsHttpErrorMiddleware:
@classmethod
def from_crawler(cls, crawler):
return cls()
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
logger.warning(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
## Instruction:
Use log level info for HTTP statuses 500, 502, 503, 504.
These status codes are usually induced by overloaded sites, updates, short
downtimes, etc. and are not that relevant.
## Code After:
import logging
from scrapy.spidermiddlewares.httperror import HttpError
logger = logging.getLogger(__name__)
class FeedsHttpErrorMiddleware:
@classmethod
def from_crawler(cls, crawler):
return cls()
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
if response.status in [500, 502, 503, 504]:
# These status codes are usually induced by overloaded sites,
# updates, short downtimes, etc. and are not that relevant.
lgr = logger.info
else:
lgr = logger.warning
lgr(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
|
// ... existing code ...
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
if response.status in [500, 502, 503, 504]:
# These status codes are usually induced by overloaded sites,
# updates, short downtimes, etc. and are not that relevant.
lgr = logger.info
else:
lgr = logger.warning
lgr(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
// ... rest of the code ...
|
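For context on where the log-level split takes effect, a sketch of how a spider middleware like this one is typically switched on in a Scrapy project; the module path mirrors the file name in this record and the priority value is an arbitrary assumption:

# settings.py -- sketch only; the priority 600 is not taken from the project.
SPIDER_MIDDLEWARES = {
    'feeds.middlewares.FeedsHttpErrorMiddleware': 600,
}
# With the change above, a 503 from an overloaded site is logged at INFO,
# while e.g. an unexpected 404 still surfaces as a WARNING.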
e79010f0aedf6f832ef14a72f435ddba33068e35
|
kindergarten-garden/kindergarten_garden.py
|
kindergarten-garden/kindergarten_garden.py
|
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
rows = garden.split()
patches = [rows[0][i:i+2] + rows[1][i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
row1, row2 = garden.split()
patches = [row1[i:i+2] + row2[i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
Use unpacking for simpler code
|
Use unpacking for simpler code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
python
|
## Code Before:
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
rows = garden.split()
patches = [rows[0][i:i+2] + rows[1][i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
## Instruction:
Use unpacking for simpler code
## Code After:
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
row1, row2 = garden.split()
patches = [row1[i:i+2] + row2[i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
# ... existing code ...
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
row1, row2 = garden.split()
patches = [row1[i:i+2] + row2[i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
# ... rest of the code ...
|
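A quick usage sketch for the Garden class in this record; the two-row garden string is an assumed example, and with the default roster Alice owns the first two columns of each row:

from kindergarten_garden import Garden  # assumed module name, matching the file in this record

garden = Garden('VRCGVVRVCCGCVVGCGCGG\nVRCCCGCRRGVCGCRVVCVR')  # assumed example layout
print(garden.plants('Alice'))  # ['Violets', 'Radishes', 'Violets', 'Radishes']
print(garden.plants('Bob'))    # ['Clover', 'Grass', 'Clover', 'Clover']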
e24f89366a8a58a29d26f58b8f21aba437ec1566
|
tests/integration/runners/test_cache.py
|
tests/integration/runners/test_cache.py
|
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
import tests.integration as integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_cache(self):
'''
Store, list, fetch, then flush data
'''
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='test/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='test/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='test/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='test/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='test/runner')
self.assertNotIn('test_cache', ret['return'])
|
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
import tests.integration as integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_cache(self):
'''
Store, list, fetch, then flush data
'''
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='cachetest/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='cachetest/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='cachetest/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertNotIn('test_cache', ret['return'])
|
Use a slightly more specific bank name
|
Use a slightly more specific bank name
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
python
|
## Code Before:
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
import tests.integration as integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_cache(self):
'''
Store, list, fetch, then flush data
'''
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='test/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='test/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='test/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='test/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='test/runner')
self.assertNotIn('test_cache', ret['return'])
## Instruction:
Use a slightly more specific bank name
## Code After:
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
import tests.integration as integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_cache(self):
'''
Store, list, fetch, then flush data
'''
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='cachetest/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='cachetest/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='cachetest/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertNotIn('test_cache', ret['return'])
|
# ... existing code ...
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='cachetest/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='cachetest/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='cachetest/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertNotIn('test_cache', ret['return'])
# ... rest of the code ...
|
81cef77449322df026755bf0d12894066d5bdc3d
|
raven/src/test/java/net/kencochrane/raven/RavenTest.java
|
raven/src/test/java/net/kencochrane/raven/RavenTest.java
|
package net.kencochrane.raven;
import net.kencochrane.raven.connection.Connection;
import net.kencochrane.raven.event.Event;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
public class RavenTest {
@Mock
private Connection mockConnection;
private Raven raven;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
raven = new Raven(mockConnection);
}
@Test
public void testSendEvent() {
Event event = mock(Event.class);
raven.sendEvent(event);
verify(mockConnection).send(event);
}
}
|
package net.kencochrane.raven;
import net.kencochrane.raven.connection.Connection;
import net.kencochrane.raven.event.Event;
import net.kencochrane.raven.event.EventBuilder;
import net.kencochrane.raven.event.helper.EventBuilderHelper;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.*;
public class RavenTest {
@Mock
private Connection mockConnection;
private Raven raven;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
raven = new Raven(mockConnection);
}
@Test
public void testSendEvent() {
Event event = mock(Event.class);
raven.sendEvent(event);
verify(mockConnection).send(event);
}
@Test
public void testChangeConnection() {
Event event = mock(Event.class);
Connection mockNewConnection = mock(Connection.class);
raven.setConnection(mockNewConnection);
raven.sendEvent(event);
verify(mockConnection, never()).send(event);
verify(mockNewConnection).send(event);
}
@Test
public void testAddRemoveBuilderHelpers() {
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
assertThat(raven.getBuilderHelpers(), not(contains(builderHelper)));
raven.addBuilderHelper(builderHelper);
assertThat(raven.getBuilderHelpers(), contains(builderHelper));
raven.removeBuilderHelper(builderHelper);
assertThat(raven.getBuilderHelpers(), not(contains(builderHelper)));
}
@Test(expected = UnsupportedOperationException.class)
public void testCantModifyBuilderHelpersDirectly() {
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
raven.getBuilderHelpers().add(builderHelper);
}
@Test
public void testRunBuilderHelpers() {
EventBuilder eventBuilder = mock(EventBuilder.class);
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
raven.addBuilderHelper(builderHelper);
raven.runBuilderHelpers(eventBuilder);
verify(builderHelper).helpBuildingEvent(eventBuilder);
}
}
|
Add unit tests for Raven
|
Add unit tests for Raven
|
Java
|
bsd-3-clause
|
buckett/raven-java,littleyang/raven-java,galmeida/raven-java,buckett/raven-java,galmeida/raven-java,reki2000/raven-java6,littleyang/raven-java,reki2000/raven-java6
|
java
|
## Code Before:
package net.kencochrane.raven;
import net.kencochrane.raven.connection.Connection;
import net.kencochrane.raven.event.Event;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
public class RavenTest {
@Mock
private Connection mockConnection;
private Raven raven;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
raven = new Raven(mockConnection);
}
@Test
public void testSendEvent() {
Event event = mock(Event.class);
raven.sendEvent(event);
verify(mockConnection).send(event);
}
}
## Instruction:
Add unit tests for Raven
## Code After:
package net.kencochrane.raven;
import net.kencochrane.raven.connection.Connection;
import net.kencochrane.raven.event.Event;
import net.kencochrane.raven.event.EventBuilder;
import net.kencochrane.raven.event.helper.EventBuilderHelper;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.*;
public class RavenTest {
@Mock
private Connection mockConnection;
private Raven raven;
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
raven = new Raven(mockConnection);
}
@Test
public void testSendEvent() {
Event event = mock(Event.class);
raven.sendEvent(event);
verify(mockConnection).send(event);
}
@Test
public void testChangeConnection() {
Event event = mock(Event.class);
Connection mockNewConnection = mock(Connection.class);
raven.setConnection(mockNewConnection);
raven.sendEvent(event);
verify(mockConnection, never()).send(event);
verify(mockNewConnection).send(event);
}
@Test
public void testAddRemoveBuilderHelpers() {
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
assertThat(raven.getBuilderHelpers(), not(contains(builderHelper)));
raven.addBuilderHelper(builderHelper);
assertThat(raven.getBuilderHelpers(), contains(builderHelper));
raven.removeBuilderHelper(builderHelper);
assertThat(raven.getBuilderHelpers(), not(contains(builderHelper)));
}
@Test(expected = UnsupportedOperationException.class)
public void testCantModifyBuilderHelpersDirectly() {
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
raven.getBuilderHelpers().add(builderHelper);
}
@Test
public void testRunBuilderHelpers() {
EventBuilder eventBuilder = mock(EventBuilder.class);
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
raven.addBuilderHelper(builderHelper);
raven.runBuilderHelpers(eventBuilder);
verify(builderHelper).helpBuildingEvent(eventBuilder);
}
}
|
# ... existing code ...
import net.kencochrane.raven.connection.Connection;
import net.kencochrane.raven.event.Event;
import net.kencochrane.raven.event.EventBuilder;
import net.kencochrane.raven.event.helper.EventBuilderHelper;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.*;
public class RavenTest {
@Mock
# ... modified code ...
verify(mockConnection).send(event);
}
@Test
public void testChangeConnection() {
Event event = mock(Event.class);
Connection mockNewConnection = mock(Connection.class);
raven.setConnection(mockNewConnection);
raven.sendEvent(event);
verify(mockConnection, never()).send(event);
verify(mockNewConnection).send(event);
}
@Test
public void testAddRemoveBuilderHelpers() {
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
assertThat(raven.getBuilderHelpers(), not(contains(builderHelper)));
raven.addBuilderHelper(builderHelper);
assertThat(raven.getBuilderHelpers(), contains(builderHelper));
raven.removeBuilderHelper(builderHelper);
assertThat(raven.getBuilderHelpers(), not(contains(builderHelper)));
}
@Test(expected = UnsupportedOperationException.class)
public void testCantModifyBuilderHelpersDirectly() {
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
raven.getBuilderHelpers().add(builderHelper);
}
@Test
public void testRunBuilderHelpers() {
EventBuilder eventBuilder = mock(EventBuilder.class);
EventBuilderHelper builderHelper = mock(EventBuilderHelper.class);
raven.addBuilderHelper(builderHelper);
raven.runBuilderHelpers(eventBuilder);
verify(builderHelper).helpBuildingEvent(eventBuilder);
}
}
# ... rest of the code ...
|
fe19fa7ac7f98525980e5b074bb17015531b2b58
|
buzzwordbingo/views.py
|
buzzwordbingo/views.py
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
Add a description to the main view using Markdown.
|
Add a description to the main view using Markdown.
|
Python
|
bsd-3-clause
|
seanfisk/buzzword-bingo-server,seanfisk/buzzword-bingo-server
|
python
|
## Code Before:
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
## Instruction:
Add a description to the main view using Markdown.
## Code After:
from django.core.urlresolvers import reverse
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
{'name': 'Boards', 'url': reverse('board-root')},
]
|
...
from djangorestframework.views import View
class BuzzwordBingoView(View):
"""The buzzword bingo REST API provides an interface to a collection of
boards, which contain the buzzwords on the board and a list of win
conditions, which are Python code which determines if a given board is a
winning board.
For more about the game of buzzword bingo, please see the [Buzzword Bingo
article on Wikipedia](http://en.wikipedia.org/wiki/Buzzword_bingo).
"""
def get(self, request):
return [{'name': 'Buzzwords', 'url': reverse('buzzword-root')},
{'name': 'Win Conditions', 'url': reverse('win-condition-root')},
...
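A minimal sketch of the same pattern, not taken from the commit: djangorestframework renders a view class docstring as the endpoint's description, so any Markdown placed there shows up on the browsable API page. The ExampleView name is hypothetical.
```python
# Hypothetical view; only the docstring-as-Markdown-description pattern is the point.
from djangorestframework.views import View

class ExampleView(View):
    """Short summary line.

    A longer *Markdown* body that the browsable API page displays
    beneath the endpoint, with [links](http://example.com) intact.
    """
    def get(self, request):
        return []
```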
|
093693a7dc5c39d74498bb99fa3f9782fdad63f1
|
test2/int_overflow/trapv_shift.c
|
test2/int_overflow/trapv_shift.c
|
// RUN: %ucc -ftrapv -fno-const-fold -o %t %s
// RUN: %t; [ $? -ne 0 ]
main()
{
int x;
// ensure powers of two aren't shift-converted, as overflow can't catch this
x = -3 * 0x4000000000000000;
return 0;
}
|
// RUN: %ocheck SIGILL %s -ftrapv -fno-const-fold -DT=int
// RUN: %ocheck SIGILL %s -ftrapv -fno-const-fold -DT=long
// RUN: %ocheck 0 %s -fno-const-fold -DT=int
// RUN: %ocheck 0 %s -fno-const-fold -DT=long
main()
{
// test with T being both int and long, to check how truncations are dealt with
T x;
// ensure powers of two aren't shift-converted, as overflow can't catch this
x = -3 * 0x4000000000000000;
return 0;
}
|
Update trapv tests with explicit signal name
|
Update trapv tests with explicit signal name
|
C
|
mit
|
bobrippling/ucc-c-compiler,bobrippling/ucc-c-compiler,bobrippling/ucc-c-compiler
|
c
|
## Code Before:
// RUN: %ucc -ftrapv -fno-const-fold -o %t %s
// RUN: %t; [ $? -ne 0 ]
main()
{
int x;
// ensure powers of two aren't shift-converted, as overflow can't catch this
x = -3 * 0x4000000000000000;
return 0;
}
## Instruction:
Update trapv tests with explicit signal name
## Code After:
// RUN: %ocheck SIGILL %s -ftrapv -fno-const-fold -DT=int
// RUN: %ocheck SIGILL %s -ftrapv -fno-const-fold -DT=long
// RUN: %ocheck 0 %s -fno-const-fold -DT=int
// RUN: %ocheck 0 %s -fno-const-fold -DT=long
main()
{
// test with T being both int and long, to check how truncations are dealt with
T x;
// ensure powers of two aren't shift-converted, as overflow can't catch this
x = -3 * 0x4000000000000000;
return 0;
}
|
// ... existing code ...
// RUN: %ocheck SIGILL %s -ftrapv -fno-const-fold -DT=int
// RUN: %ocheck SIGILL %s -ftrapv -fno-const-fold -DT=long
// RUN: %ocheck 0 %s -fno-const-fold -DT=int
// RUN: %ocheck 0 %s -fno-const-fold -DT=long
main()
{
// test with T being both int and long, to check how truncations are dealt with
T x;
// ensure powers of two aren't shift-converted, as overflow can't catch this
x = -3 * 0x4000000000000000;
// ... rest of the code ...
|
642f3109cb9fb6179d51de7a7d5781044ff6be3b
|
satori.core/satori/core/__init__.py
|
satori.core/satori/core/__init__.py
|
import os
def manage():
from django.core.management import execute_manager
import satori.core.settings
# HACK
import django.core.management
old_fmm = django.core.management.find_management_module
def find_management_module(app_name):
if app_name == 'satori.core':
return os.path.join(os.path.dirname(__file__), 'management')
else:
return old_fmm(app_name)
django.core.management.find_management_module = find_management_module
# END OF HACK
execute_manager(satori.core.settings)
|
import sys
import os
def manage():
from django.core.management import execute_manager
settings_module_name = os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'satori.core.settings')
__import__(settings_module_name)
settings_module = sys.modules[settings_module_name]
# HACK
import django.core.management
old_fmm = django.core.management.find_management_module
def find_management_module(app_name):
if app_name == 'satori.core':
return os.path.join(os.path.dirname(__file__), 'management')
else:
return old_fmm(app_name)
django.core.management.find_management_module = find_management_module
# END OF HACK
execute_manager(settings_module)
|
Allow specification of custom settings module using DJANGO_SETTINGS_MODULE environment variable.
|
Allow specification of custom settings module using DJANGO_SETTINGS_MODULE environment variable.
|
Python
|
mit
|
zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori,zielmicha/satori
|
python
|
## Code Before:
import os
def manage():
from django.core.management import execute_manager
import satori.core.settings
# HACK
import django.core.management
old_fmm = django.core.management.find_management_module
def find_management_module(app_name):
if app_name == 'satori.core':
return os.path.join(os.path.dirname(__file__), 'management')
else:
return old_fmm(app_name)
django.core.management.find_management_module = find_management_module
# END OF HACK
execute_manager(satori.core.settings)
## Instruction:
Allow specification of custom settings module using DJANGO_SETTINGS_MODULE environment variable.
## Code After:
import sys
import os
def manage():
from django.core.management import execute_manager
settings_module_name = os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'satori.core.settings')
__import__(settings_module_name)
settings_module = sys.modules[settings_module_name]
# HACK
import django.core.management
old_fmm = django.core.management.find_management_module
def find_management_module(app_name):
if app_name == 'satori.core':
return os.path.join(os.path.dirname(__file__), 'management')
else:
return old_fmm(app_name)
django.core.management.find_management_module = find_management_module
# END OF HACK
execute_manager(settings_module)
|
# ... existing code ...
import sys
import os
def manage():
from django.core.management import execute_manager
settings_module_name = os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'satori.core.settings')
__import__(settings_module_name)
settings_module = sys.modules[settings_module_name]
# HACK
import django.core.management
# ... modified code ...
django.core.management.find_management_module = find_management_module
# END OF HACK
execute_manager(settings_module)
# ... rest of the code ...
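A hedged usage sketch for the behaviour above; the module name 'satori.core.settings_local' is an assumption for illustration, and manage() still falls back to 'satori.core.settings' when the variable is unset.
```python
# Select a custom settings module before invoking manage(); the module name
# used here is hypothetical.
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'satori.core.settings_local'

from satori.core import manage
manage()
```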
|
5b8d378c0bc0da49812eb880e40f6de5844d7d70
|
Sources/Core/Transformer/OCATransformer.h
|
Sources/Core/Transformer/OCATransformer.h
|
//
// OCATransformer.h
// Objective-Chain
//
// Created by Martin Kiss on 31.12.13.
// Copyright © 2014 Martin Kiss. All rights reserved.
//
#import "OCATransformer+Base.h"
#import "OCATransformer+Nil.h"
#import "OCATransformer+Predefined.h"
|
//
// OCATransformer.h
// Objective-Chain
//
// Created by Martin Kiss on 31.12.13.
// Copyright © 2014 Martin Kiss. All rights reserved.
//
#import "OCATransformer+Base.h"
#import "OCATransformer+Predefined.h"
|
Fix import of removed file
|
Fix import of removed file
|
C
|
mit
|
iMartinKiss/Objective-Chain,Tricertops/Objective-Chain
|
c
|
## Code Before:
//
// OCATransformer.h
// Objective-Chain
//
// Created by Martin Kiss on 31.12.13.
// Copyright © 2014 Martin Kiss. All rights reserved.
//
#import "OCATransformer+Base.h"
#import "OCATransformer+Nil.h"
#import "OCATransformer+Predefined.h"
## Instruction:
Fix import of removed file
## Code After:
//
// OCATransformer.h
// Objective-Chain
//
// Created by Martin Kiss on 31.12.13.
// Copyright © 2014 Martin Kiss. All rights reserved.
//
#import "OCATransformer+Base.h"
#import "OCATransformer+Predefined.h"
|
// ... existing code ...
#import "OCATransformer+Base.h"
#import "OCATransformer+Predefined.h"
// ... rest of the code ...
|
616bd7c5ff8ba5fe5dd190a459b93980613a3ad4
|
myuw_mobile/restclients/dao_implementation/hfs.py
|
myuw_mobile/restclients/dao_implementation/hfs.py
|
from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
class File(object):
"""
This implementation returns mock/static content.
Use this DAO with this configuration:
RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File'
"""
def getURL(self, url, headers):
"""
Return the url for accessing the mock data in local file
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
return get_mockdata_url("hfs", "file", url, headers,
dir_base=dirname(__file__))
class Live(object):
"""
This DAO provides real data.
Access is restricted to localhost.
"""
pool = None
def getURL(self, url, headers):
"""
Return the absolute url for accessing live data
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost/'
if Live.pool == None:
Live.pool = get_con_pool(host, None, None)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
|
from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
import logging
from myuw_mobile.logger.logback import log_info
class File(object):
"""
This implementation returns mock/static content.
Use this DAO with this configuration:
RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File'
"""
def getURL(self, url, headers):
"""
Return the url for accessing the mock data in local file
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
return get_mockdata_url("hfs", "file", url, headers,
dir_base=dirname(__file__))
class Live(object):
"""
This DAO provides real data.
Access is restricted to localhost.
"""
logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live')
pool = None
def getURL(self, url, headers):
"""
Return the absolute url for accessing live data
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost:80/'
if Live.pool is None:
Live.pool = get_con_pool(host, None, None,
socket_timeout=5.0,
max_pool_size=5)
log_info(Live.logger, Live.pool)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
|
Fix bug: must specify the port number.
|
Fix bug: must specify the port number.
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw,fanglinfang/myuw,fanglinfang/myuw,uw-it-aca/myuw
|
python
|
## Code Before:
from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
class File(object):
"""
This implementation returns mock/static content.
Use this DAO with this configuration:
RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File'
"""
def getURL(self, url, headers):
"""
Return the url for accessing the mock data in local file
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
return get_mockdata_url("hfs", "file", url, headers,
dir_base=dirname(__file__))
class Live(object):
"""
This DAO provides real data.
Access is restricted to localhost.
"""
pool = None
def getURL(self, url, headers):
"""
Return the absolute url for accessing live data
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost/'
if Live.pool == None:
Live.pool = get_con_pool(host, None, None)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
## Instruction:
Fix bug: must specify the port number.
## Code After:
from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
import logging
from myuw_mobile.logger.logback import log_info
class File(object):
"""
This implementation returns mock/static content.
Use this DAO with this configuration:
RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File'
"""
def getURL(self, url, headers):
"""
Return the url for accessing the mock data in local file
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
return get_mockdata_url("hfs", "file", url, headers,
dir_base=dirname(__file__))
class Live(object):
"""
This DAO provides real data.
Access is restricted to localhost.
"""
logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live')
pool = None
def getURL(self, url, headers):
"""
Return the absolute url for accessing live data
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost:80/'
if Live.pool is None:
Live.pool = get_con_pool(host, None, None,
socket_timeout=5.0,
max_pool_size=5)
log_info(Live.logger, Live.pool)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
|
// ... existing code ...
from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
import logging
from myuw_mobile.logger.logback import log_info
class File(object):
"""
// ... modified code ...
This DAO provides real data.
Access is restricted to localhost.
"""
logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live')
pool = None
def getURL(self, url, headers):
...
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost:80/'
if Live.pool is None:
Live.pool = get_con_pool(host, None, None,
socket_timeout=5.0,
max_pool_size=5)
log_info(Live.logger, Live.pool)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
// ... rest of the code ...
|
27a0165d45f52114ebb65d59cf8e4f84f3232881
|
tests/test_lattice.py
|
tests/test_lattice.py
|
import rml.lattice
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
|
import rml.lattice
import rml.element
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice()
element_length = 1.5
e = rml.element.Element('dummy', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
|
Test simple lattice with one element.
|
Test simple lattice with one element.
|
Python
|
apache-2.0
|
willrogers/pml,willrogers/pml,razvanvasile/RML
|
python
|
## Code Before:
import rml.lattice
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
## Instruction:
Test simple lattice with one element.
## Code After:
import rml.lattice
import rml.element
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice()
element_length = 1.5
e = rml.element.Element('dummy', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
|
# ... existing code ...
import rml.lattice
import rml.element
def test_create_lattice():
l = rml.lattice.Lattice()
assert(len(l)) == 0
def test_non_negative_lattice():
l = rml.lattice.Lattice()
assert(len(l)) >= 0
def test_lattice_with_one_element():
l = rml.lattice.Lattice()
element_length = 1.5
e = rml.element.Element('dummy', element_length)
l.append_element(e)
# There is one element in the lattice.
assert(len(l) == 1)
# The total length of the lattice is the same as its one element.
assert l.length() == element_length
# ... rest of the code ...
|
58823e20e3891cea7198be15b7c85395521086e1
|
extension_course/tests/conftest.py
|
extension_course/tests/conftest.py
|
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
Remove some needless code from course extension tests
|
Remove some needless code from course extension tests
|
Python
|
mit
|
City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents
|
python
|
## Code Before:
import pytest
from django.conf import settings
from django.core.management import call_command
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings)
# Django test harness tries to serialize DB in order to support transactions
# within tests. (It restores the snapshot after such tests).
# This fails with modeltranslate, as the serialization is done before
# sync_translation_fields has a chance to run. Thus the fields are missing
# and serialization fails horribly.
#@pytest.fixture(scope='session')
#def django_db_modify_db_settings(django_db_modify_db_settings_xdist_suffix):
# settings.DATABASES['default']['TEST']['SERIALIZE'] = False
@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
with django_db_blocker.unblock():
call_command('sync_translation_fields', '--noinput')
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
## Instruction:
Remove some needless code from course extension tests
## Code After:
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
def auto_enable(settings):
settings.AUTO_ENABLED_EXTENSIONS = ['course']
|
# ... existing code ...
import pytest
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa
location_id, minimal_event_dict, municipality, organization, place, user,
user_api_client, django_db_modify_db_settings, django_db_setup)
@pytest.fixture(autouse=True)
# ... rest of the code ...
|
0fbd183a95c65eb80bb813368eeb045e9c43b630
|
ray/util.py
|
ray/util.py
|
import json
import itertools as it
all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
all_types = ['Color', 'Texture', 'Edge', 'Orientation']
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfn, outdir='.'):
d = {}
d['images'] = [{'name': ilbfn}]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
with open(jsonfn, 'w') as f:
json.dump(d, f)
|
import json
import itertools as it
all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
all_types = ['Color', 'Texture', 'Edge', 'Orientation']
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfns, outdir='.'):
if isinstance(ilbfns, str) or isinstance(ilbfns, unicode):
ilbfns = [ilbfns]
d = {}
d['images'] = [{'name': ilbfn} for ilbfn in ilbfns]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
with open(jsonfn, 'w') as f:
json.dump(d, f)
|
Allow multiple batch files for seg pipeline json
|
Allow multiple batch files for seg pipeline json
|
Python
|
bsd-3-clause
|
janelia-flyem/gala,jni/gala,jni/ray
|
python
|
## Code Before:
import json
import itertools as it
all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
all_types = ['Color', 'Texture', 'Edge', 'Orientation']
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfn, outdir='.'):
d = {}
d['images'] = [{'name': ilbfn}]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
with open(jsonfn, 'w') as f:
json.dump(d, f)
## Instruction:
Allow multiple batch files for seg pipeline json
## Code After:
import json
import itertools as it
all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
all_types = ['Color', 'Texture', 'Edge', 'Orientation']
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfns, outdir='.'):
if isinstance(ilbfns, str) or isinstance(ilbfns, unicode):
ilbfns = [ilbfns]
d = {}
d['images'] = [{'name': ilbfn} for ilbfn in ilbfns]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
with open(jsonfn, 'w') as f:
json.dump(d, f)
|
# ... existing code ...
full_feature_set = list(it.product(all_types, all_sizes))
default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfns, outdir='.'):
if isinstance(ilbfns, str) or isinstance(ilbfns, unicode):
ilbfns = [ilbfns]
d = {}
d['images'] = [{'name': ilbfn} for ilbfn in ilbfns]
d['session'] = ilfn
d['output_dir'] = outdir
d['features'] = default_feature_set
# ... rest of the code ...
|
a9ac098ec492739f37005c9bd6278105df0261c5
|
parliamentsearch/items.py
|
parliamentsearch/items.py
|
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
|
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
q_url = scrapy.Field()
q_annex = scrapy.Field()
|
Add fields to save question url and annexure links
|
Add fields to save question url and annexure links
Details of each question is in another link and some questions have annexures
(in English/Hindi), add fields to save all these items
Signed-off-by: Arun Siluvery <[email protected]>
|
Python
|
mit
|
mthipparthi/parliament-search
|
python
|
## Code Before:
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
## Instruction:
Add fields to save question url and annexure links
Details of each question is in another link and some questions have annexures
(in English/Hindi), add fields to save all these items
Signed-off-by: Arun Siluvery <[email protected]>
## Code After:
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
q_url = scrapy.Field()
q_annex = scrapy.Field()
|
// ... existing code ...
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
q_url = scrapy.Field()
q_annex = scrapy.Field()
// ... rest of the code ...
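A hypothetical spider-side snippet showing where the new fields might be filled in; the row selector, XPath expressions, and URL handling are assumptions, not part of the commit.
```python
# Illustrative only: populate the new fields from a table-row selector.
item = LokSabhaQuestion()
item['q_url'] = response.urljoin(row.xpath('.//a/@href').extract_first())
item['q_annex'] = [response.urljoin(href)
                   for href in row.xpath('.//a[contains(@href, "Annex")]/@href').extract()]
```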
|
3632ad692110b71b656e0e1e98781ab68ac3a277
|
src/main/java/com/darwinsys/clientware/UrlJaxRsClient.java
|
src/main/java/com/darwinsys/clientware/UrlJaxRsClient.java
|
package com.darwinsys.clientware;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* A JAX-RS client NOT using the JAX-RS Client API
* In fact, not using anything except Core Java APIs.
*/
public class UrlJaxRsClient {
static final String BASE_URL =
"http://androidcookbook.com/seam/resource/rest/recipe";
public static void main(String[] args) throws Exception {
URL url = new URL(BASE_URL + "/4");
final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.addRequestProperty("Accept", "application/json");
InputStream is = connection.getInputStream();
// Read "is" to get the response from a GET
BufferedReader bi = new BufferedReader(new InputStreamReader(is));
String line = null;
while ((line = bi.readLine()) != null) {
System.out.println(line);
}
}
}
|
package com.darwinsys.clientware;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* A JAX-RS client NOT using the JAX-RS Client API
* In fact, not using anything except Core Java APIs.
*/
public class UrlJaxRsClient {
static final String BASE_URL =
"http://androidcookbook.com/seam/resource/rest/recipe";
public static void main(String[] args) throws Exception {
final int recipeId = 4;
URL url = new URL(BASE_URL + "/" + recipeId);
final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.addRequestProperty("Accept", "application/json");
InputStream is = connection.getInputStream();
// Read "is" to get the response from a GET
BufferedReader bi = new BufferedReader(new InputStreamReader(is));
String line = null;
while ((line = bi.readLine()) != null) {
System.out.println(line);
}
}
}
|
Clarify client code a bit
|
Clarify client code a bit
|
Java
|
bsd-2-clause
|
IanDarwin/crs577add,IanDarwin/crs577add,IanDarwin/crs577add,IanDarwin/crs577add
|
java
|
## Code Before:
package com.darwinsys.clientware;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* A JAX-RS client NOT using the JAX-RS Client API
* In fact, not using anything except Core Java APIs.
*/
public class UrlJaxRsClient {
static final String BASE_URL =
"http://androidcookbook.com/seam/resource/rest/recipe";
public static void main(String[] args) throws Exception {
URL url = new URL(BASE_URL + "/4");
final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.addRequestProperty("Accept", "application/json");
InputStream is = connection.getInputStream();
// Read "is" to get the response from a GET
BufferedReader bi = new BufferedReader(new InputStreamReader(is));
String line = null;
while ((line = bi.readLine()) != null) {
System.out.println(line);
}
}
}
## Instruction:
Clarify client code a bit
## Code After:
package com.darwinsys.clientware;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* A JAX-RS client NOT using the JAX-RS Client API
* In fact, not using anything except Core Java APIs.
*/
public class UrlJaxRsClient {
static final String BASE_URL =
"http://androidcookbook.com/seam/resource/rest/recipe";
public static void main(String[] args) throws Exception {
final int recipeId = 4;
URL url = new URL(BASE_URL + "/" + recipeId);
final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.addRequestProperty("Accept", "application/json");
InputStream is = connection.getInputStream();
// Read "is" to get the response from a GET
BufferedReader bi = new BufferedReader(new InputStreamReader(is));
String line = null;
while ((line = bi.readLine()) != null) {
System.out.println(line);
}
}
}
|
...
public class UrlJaxRsClient {
static final String BASE_URL =
"http://androidcookbook.com/seam/resource/rest/recipe";
public static void main(String[] args) throws Exception {
final int recipeId = 4;
URL url = new URL(BASE_URL + "/" + recipeId);
final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.addRequestProperty("Accept", "application/json");
InputStream is = connection.getInputStream();
...
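For comparison only, a rough Python equivalent of the same GET using nothing but the standard library; the recipe id of 4 mirrors the example above and is otherwise arbitrary.
```python
# Same request, core library only (Python flavour of the Java example).
import urllib.request

req = urllib.request.Request(
    "http://androidcookbook.com/seam/resource/rest/recipe/4",
    headers={"Accept": "application/json"})
with urllib.request.urlopen(req) as resp:
    print(resp.read().decode())
```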
|
387ec6864a25317ff45f9001809c4752047e2637
|
src/os/Linux/findself.c
|
src/os/Linux/findself.c
|
char *os_find_self(void)
{
// PATH_MAX (used by readlink(2)) is not necessarily available
size_t size = 2048, used = 0;
char *path = NULL;
do {
size *= 2;
path = realloc(path, size);
used = readlink("/proc/self/exe", path, size);
} while (used > size && path != NULL);
return path;
}
|
char *os_find_self(void)
{
// PATH_MAX (used by readlink(2)) is not necessarily available
size_t size = 2048, used = 0;
char *path = NULL;
do {
size *= 2;
path = realloc(path, size);
used = readlink("/proc/self/exe", path, size);
path[used - 1] = '\0';
} while (used >= size && path != NULL);
return path;
}
|
Mend loop condition from 344aae0
|
Mend loop condition from 344aae0
|
C
|
mit
|
kulp/tenyr,kulp/tenyr,kulp/tenyr
|
c
|
## Code Before:
char *os_find_self(void)
{
// PATH_MAX (used by readlink(2)) is not necessarily available
size_t size = 2048, used = 0;
char *path = NULL;
do {
size *= 2;
path = realloc(path, size);
used = readlink("/proc/self/exe", path, size);
} while (used > size && path != NULL);
return path;
}
## Instruction:
Mend loop condition from 344aae0
## Code After:
char *os_find_self(void)
{
// PATH_MAX (used by readlink(2)) is not necessarily available
size_t size = 2048, used = 0;
char *path = NULL;
do {
size *= 2;
path = realloc(path, size);
used = readlink("/proc/self/exe", path, size);
path[used - 1] = '\0';
} while (used >= size && path != NULL);
return path;
}
|
// ... existing code ...
size *= 2;
path = realloc(path, size);
used = readlink("/proc/self/exe", path, size);
path[used - 1] = '\0';
} while (used >= size && path != NULL);
return path;
}
// ... rest of the code ...
|
2fc7fd03afb71d79ef63463e60eecfaf625cf3bd
|
intellij-toml/src/main/kotlin/org/toml/lang/TomlFileTypeDetector.kt
|
intellij-toml/src/main/kotlin/org/toml/lang/TomlFileTypeDetector.kt
|
/*
* Use of this source code is governed by the MIT license that can be
* found in the LICENSE file.
*/
package org.toml.lang
import com.intellij.openapi.fileTypes.FileType
import com.intellij.openapi.fileTypes.FileTypeRegistry
import com.intellij.openapi.util.io.ByteSequence
import com.intellij.openapi.vfs.VirtualFile
import org.toml.lang.psi.TomlFileType
class TomlFileTypeDetector : FileTypeRegistry.FileTypeDetector {
override fun getVersion(): Int = 1
override fun detect(file: VirtualFile, firstBytes: ByteSequence, firstCharsIfText: CharSequence?): FileType? =
if (file.name == "config" && file.parent?.name == ".cargo") TomlFileType else null
}
|
/*
* Use of this source code is governed by the MIT license that can be
* found in the LICENSE file.
*/
package org.toml.lang
import com.intellij.openapi.fileTypes.FileType
import com.intellij.openapi.fileTypes.FileTypeRegistry
import com.intellij.openapi.util.io.ByteSequence
import com.intellij.openapi.vfs.VirtualFile
import org.toml.lang.psi.TomlFileType
class TomlFileTypeDetector : FileTypeRegistry.FileTypeDetector {
override fun getVersion() = 1
override fun detect(file: VirtualFile, firstBytes: ByteSequence, firstCharsIfText: CharSequence?): FileType? =
if (file.name == "config" && file.parent?.name == ".cargo") TomlFileType else null
override fun getDetectedFileTypes() = listOf(TomlFileType)
override fun getDesiredContentPrefixLength() = 0
}
|
Use performance optimizations provided by FileTypeDetector API not to slow down IDEs using TOML plugin
|
Use performance optimizations provided by FileTypeDetector API not to slow down IDEs using TOML plugin
|
Kotlin
|
mit
|
intellij-rust/intellij-rust,intellij-rust/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,Undin/intellij-rust,intellij-rust/intellij-rust,Undin/intellij-rust,Undin/intellij-rust,Undin/intellij-rust,intellij-rust/intellij-rust
|
kotlin
|
## Code Before:
/*
* Use of this source code is governed by the MIT license that can be
* found in the LICENSE file.
*/
package org.toml.lang
import com.intellij.openapi.fileTypes.FileType
import com.intellij.openapi.fileTypes.FileTypeRegistry
import com.intellij.openapi.util.io.ByteSequence
import com.intellij.openapi.vfs.VirtualFile
import org.toml.lang.psi.TomlFileType
class TomlFileTypeDetector : FileTypeRegistry.FileTypeDetector {
override fun getVersion(): Int = 1
override fun detect(file: VirtualFile, firstBytes: ByteSequence, firstCharsIfText: CharSequence?): FileType? =
if (file.name == "config" && file.parent?.name == ".cargo") TomlFileType else null
}
## Instruction:
Use performance optimizations provided by FileTypeDetector API not to slow down IDEs using TOML plugin
## Code After:
/*
* Use of this source code is governed by the MIT license that can be
* found in the LICENSE file.
*/
package org.toml.lang
import com.intellij.openapi.fileTypes.FileType
import com.intellij.openapi.fileTypes.FileTypeRegistry
import com.intellij.openapi.util.io.ByteSequence
import com.intellij.openapi.vfs.VirtualFile
import org.toml.lang.psi.TomlFileType
class TomlFileTypeDetector : FileTypeRegistry.FileTypeDetector {
override fun getVersion() = 1
override fun detect(file: VirtualFile, firstBytes: ByteSequence, firstCharsIfText: CharSequence?): FileType? =
if (file.name == "config" && file.parent?.name == ".cargo") TomlFileType else null
override fun getDetectedFileTypes() = listOf(TomlFileType)
override fun getDesiredContentPrefixLength() = 0
}
|
...
import org.toml.lang.psi.TomlFileType
class TomlFileTypeDetector : FileTypeRegistry.FileTypeDetector {
override fun getVersion() = 1
override fun detect(file: VirtualFile, firstBytes: ByteSequence, firstCharsIfText: CharSequence?): FileType? =
if (file.name == "config" && file.parent?.name == ".cargo") TomlFileType else null
override fun getDetectedFileTypes() = listOf(TomlFileType)
override fun getDesiredContentPrefixLength() = 0
}
...
|
791e254c6f1efed88bdc0714ee9bb264634e74a8
|
transunit.py
|
transunit.py
|
class TransUnit(object):
"Container for XLIFF trans-unit element"
def __init__(self, argument):
self.origin_unit = argument
self.attributes = argument.attrib
self.id = ''
self.ns = ''
self.state = ''
@staticmethod
def create(xml_tu):
tunit = TransUnit(xml_tu)
tunit.id = tunit.attributes['id']
tunit.ns = tunit.__read_ns()
tunit.state = tunit.__get_state_from_target()
return tunit
def __get_state_from_target(self):
target = self.origin_unit.find('{}target'.format(self.ns))
if "state" in target.attrib.keys():
return target.attrib['state']
else:
return ''
def __has_ns(self):
return '{' in self.origin_unit.tag
def __read_ns(self):
if self.__has_ns():
ns, tag = self.origin_unit.tag.split('}')
ns = ns + '}'
return ns
else:
return ''
def has_any_state(self, list_of_states):
return self.state in list_of_states
|
class TransUnit(object):
"Container for XLIFF trans-unit element"
def __init__(self, argument):
self.origin_unit = argument
self.attributes = argument.attrib
self.id = ''
self.ns = ''
self.state = ''
@staticmethod
def create(xml_tu):
tunit = TransUnit(xml_tu)
tunit.id = tunit.attributes['id']
tunit.ns = tunit._read_ns()
tunit.state = tunit._get_state_from_target()
return tunit
def _read_ns(self):
if self._has_ns():
ns, tag = self.origin_unit.tag.split('}')
ns = ns + '}'
return ns
else:
return ''
def _has_ns(self):
return '{' in self.origin_unit.tag
def _get_state_from_target(self):
target = self.origin_unit.find('{}target'.format(self.ns))
if "state" in target.attrib.keys():
return target.attrib['state']
else:
return ''
def has_any_state(self, list_of_states):
return self.state in list_of_states
|
Restructure transUnit class for better readability
|
Restructure transUnit class for better readability
|
Python
|
mit
|
jakub-szczepaniak/xliff
|
python
|
## Code Before:
class TransUnit(object):
"Container for XLIFF trans-unit element"
def __init__(self, argument):
self.origin_unit = argument
self.attributes = argument.attrib
self.id = ''
self.ns = ''
self.state = ''
@staticmethod
def create(xml_tu):
tunit = TransUnit(xml_tu)
tunit.id = tunit.attributes['id']
tunit.ns = tunit.__read_ns()
tunit.state = tunit.__get_state_from_target()
return tunit
def __get_state_from_target(self):
target = self.origin_unit.find('{}target'.format(self.ns))
if "state" in target.attrib.keys():
return target.attrib['state']
else:
return ''
def __has_ns(self):
return '{' in self.origin_unit.tag
def __read_ns(self):
if self.__has_ns():
ns, tag = self.origin_unit.tag.split('}')
ns = ns + '}'
return ns
else:
return ''
def has_any_state(self, list_of_states):
return self.state in list_of_states
## Instruction:
Restructure transUnit class for better readability
## Code After:
class TransUnit(object):
"Container for XLIFF trans-unit element"
def __init__(self, argument):
self.origin_unit = argument
self.attributes = argument.attrib
self.id = ''
self.ns = ''
self.state = ''
@staticmethod
def create(xml_tu):
tunit = TransUnit(xml_tu)
tunit.id = tunit.attributes['id']
tunit.ns = tunit._read_ns()
tunit.state = tunit._get_state_from_target()
return tunit
def _read_ns(self):
if self._has_ns():
ns, tag = self.origin_unit.tag.split('}')
ns = ns + '}'
return ns
else:
return ''
def _has_ns(self):
return '{' in self.origin_unit.tag
def _get_state_from_target(self):
target = self.origin_unit.find('{}target'.format(self.ns))
if "state" in target.attrib.keys():
return target.attrib['state']
else:
return ''
def has_any_state(self, list_of_states):
return self.state in list_of_states
|
...
class TransUnit(object):
"Container for XLIFF trans-unit element"
...
tunit = TransUnit(xml_tu)
tunit.id = tunit.attributes['id']
tunit.ns = tunit._read_ns()
tunit.state = tunit._get_state_from_target()
return tunit
def _read_ns(self):
if self._has_ns():
ns, tag = self.origin_unit.tag.split('}')
ns = ns + '}'
return ns
else:
return ''
def _has_ns(self):
return '{' in self.origin_unit.tag
def _get_state_from_target(self):
target = self.origin_unit.find('{}target'.format(self.ns))
if "state" in target.attrib.keys():
...
else:
return ''
def has_any_state(self, list_of_states):
return self.state in list_of_states
...
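A hedged usage sketch for the class above; the file name and the XLIFF 1.2 namespace are assumptions about the documents being parsed.
```python
# Assumed input file and namespace; shows the intended create()/has_any_state() flow.
import xml.etree.ElementTree as ET

NS = '{urn:oasis:names:tc:xliff:document:1.2}'
tree = ET.parse('sample.xlf')
for xml_tu in tree.iter(NS + 'trans-unit'):
    unit = TransUnit.create(xml_tu)
    if unit.has_any_state(['translated', 'signed-off']):
        print(unit.id)
```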
|
5e53f1e86fc7c4f1c7b42479684ac393c997ce52
|
client/test/test-unrealcv.py
|
client/test/test-unrealcv.py
|
import unittest, time, sys
from common_conf import *
from test_server import EchoServer, MessageServer
import argparse
import threading
from test_server import TestMessageServer
from test_client import TestClientWithDummyServer
from test_commands import TestCommands
from test_realistic_rendering import TestRealisticRendering
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run tests available to travis CI
args = parser.parse_args()
suites = []
load = unittest.TestLoader().loadTestsFromTestCase
s = load(TestMessageServer); suites.append(s)
s = load(TestClientWithDummyServer); suites.append(s)
if not args.travis:
s = load(TestCommands); suites.append(s)
s = load(TestRealisticRendering); suites.append(s)
suite_obj = unittest.TestSuite(suites)
unittest.TextTestRunner(verbosity = 2).run(suite_obj)
|
import unittest, time, sys
from common_conf import *
from test_server import EchoServer, MessageServer
import argparse
import threading
from test_server import TestMessageServer
from test_client import TestClientWithDummyServer
from test_commands import TestCommands
from test_realistic_rendering import TestRealisticRendering
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run tests available to travis CI
args = parser.parse_args()
suites = []
load = unittest.TestLoader().loadTestsFromTestCase
s = load(TestMessageServer); suites.append(s)
s = load(TestClientWithDummyServer); suites.append(s)
if not args.travis:
s = load(TestCommands); suites.append(s)
s = load(TestRealisticRendering); suites.append(s)
suite_obj = unittest.TestSuite(suites)
ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
sys.exit(ret)
|
Fix exit code of unittest.
|
Fix exit code of unittest.
|
Python
|
mit
|
qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv
|
python
|
## Code Before:
import unittest, time, sys
from common_conf import *
from test_server import EchoServer, MessageServer
import argparse
import threading
from test_server import TestMessageServer
from test_client import TestClientWithDummyServer
from test_commands import TestCommands
from test_realistic_rendering import TestRealisticRendering
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run tests available to travis CI
args = parser.parse_args()
suites = []
load = unittest.TestLoader().loadTestsFromTestCase
s = load(TestMessageServer); suites.append(s)
s = load(TestClientWithDummyServer); suites.append(s)
if not args.travis:
s = load(TestCommands); suites.append(s)
s = load(TestRealisticRendering); suites.append(s)
suite_obj = unittest.TestSuite(suites)
unittest.TextTestRunner(verbosity = 2).run(suite_obj)
## Instruction:
Fix exit code of unittest.
## Code After:
import unittest, time, sys
from common_conf import *
from test_server import EchoServer, MessageServer
import argparse
import threading
from test_server import TestMessageServer
from test_client import TestClientWithDummyServer
from test_commands import TestCommands
from test_realistic_rendering import TestRealisticRendering
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--travis', action='store_true') # Only run tests available to travis CI
args = parser.parse_args()
suites = []
load = unittest.TestLoader().loadTestsFromTestCase
s = load(TestMessageServer); suites.append(s)
s = load(TestClientWithDummyServer); suites.append(s)
if not args.travis:
s = load(TestCommands); suites.append(s)
s = load(TestRealisticRendering); suites.append(s)
suite_obj = unittest.TestSuite(suites)
ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
sys.exit(ret)
|
// ... existing code ...
s = load(TestRealisticRendering); suites.append(s)
suite_obj = unittest.TestSuite(suites)
ret = not unittest.TextTestRunner(verbosity = 2).run(suite_obj).wasSuccessful()
sys.exit(ret)
// ... rest of the code ...
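The same exit-code handling written out a little more explicitly, assuming the suite_obj built above; exiting non-zero is what lets CI notice a failing suite.
```python
# Exit non-zero when the suite fails so the calling CI job fails too.
result = unittest.TextTestRunner(verbosity=2).run(suite_obj)
sys.exit(0 if result.wasSuccessful() else 1)
```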
|
fa49314b9a97f11bfbd668ae375cd257a59a444b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='rpy2_helpers',
description='Easier R use from Python',
author='Ed L. Cashin',
author_email='[email protected]',
url='https://github.com/ecashin/rpy2_helpers',
version='1.0',
install_requires=[
'click',
'numpy',
'rpy2',
],
)
|
from setuptools import setup
setup(
name='rpy2_helpers',
description='Easier R use from Python',
author='Ed L. Cashin',
author_email='[email protected]',
url='https://github.com/ecashin/rpy2_helpers',
version='0.1',
scripts = ['rpy2_helpers.py'],
install_requires=[
'click',
'numpy',
'rpy2',
],
)
|
Switch to a more descriptive version
|
Switch to a more descriptive version
|
Python
|
mit
|
ecashin/rpy2_helpers
|
python
|
## Code Before:
from setuptools import setup
setup(
name='rpy2_helpers',
description='Easier R use from Python',
author='Ed L. Cashin',
author_email='[email protected]',
url='https://github.com/ecashin/rpy2_helpers',
version='1.0',
install_requires=[
'click',
'numpy',
'rpy2',
],
)
## Instruction:
Switch to a more descriptive version
## Code After:
from setuptools import setup
setup(
name='rpy2_helpers',
description='Easier R use from Python',
author='Ed L. Cashin',
author_email='[email protected]',
url='https://github.com/ecashin/rpy2_helpers',
version='0.1',
scripts = ['rpy2_helpers.py'],
install_requires=[
'click',
'numpy',
'rpy2',
],
)
|
// ... existing code ...
author='Ed L. Cashin',
author_email='[email protected]',
url='https://github.com/ecashin/rpy2_helpers',
version='0.1',
scripts = ['rpy2_helpers.py'],
install_requires=[
'click',
'numpy',
// ... rest of the code ...
|
028cf52b2d09c6cd1ca8c0e1e779cd5d8ff3ca3a
|
tests/test_ubuntupkg.py
|
tests/test_ubuntupkg.py
|
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
Mark ubuntupkg tests as flaky
|
Mark ubuntupkg tests as flaky
|
Python
|
mit
|
lilydjwg/nvchecker
|
python
|
## Code Before:
import pytest
pytestmark = pytest.mark.asyncio
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
## Instruction:
Mark ubuntupkg tests as flaky
## Code After:
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
|
# ... existing code ...
from flaky import flaky
import pytest
pytestmark = pytest.mark.asyncio
@flaky
async def test_ubuntupkg(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None}) == "0.1.3-1"
@flaky
async def test_ubuntupkg_strip_release(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "strip-release": 1}) == "0.1.3"
@flaky
async def test_ubuntupkg_suite(get_version):
assert await get_version("sigrok-firmware-fx2lafw", {"ubuntupkg": None, "suite": "xenial"}) == "0.1.2-1"
@flaky
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {"ubuntupkg": None, "suite": "vivid"}) == "7:2.5.10-0ubuntu0.15.04.1"
# ... rest of the code ...
|
4c54e21bfaf52047e4d0790623197df84a7410e4
|
src/sys/utsname.c
|
src/sys/utsname.c
|
/* $Id$ */
/* Copyright (c) 2006 The DeforaOS Project */
#include "../syscalls.h"
#include "sys/utsname.h"
/* uname */
#ifndef __NetBSD__
syscall1(int, uname, struct utsname *, utsname);
#endif
|
/* $Id$ */
/* Copyright (c) 2007 The DeforaOS Project */
#include "../syscalls.h"
#include "sys/utsname.h"
/* uname */
#if !defined(__NetBSD__) || defined(NETBSD_USE_LINUX_EMULATION)
syscall1(int, uname, struct utsname *, utsname);
#endif
|
Allow uname() with linux emulation on NetBSD
|
Allow uname() with linux emulation on NetBSD
|
C
|
bsd-2-clause
|
DeforaOS/libc,DeforaOS/libc
|
c
|
## Code Before:
/* $Id$ */
/* Copyright (c) 2006 The DeforaOS Project */
#include "../syscalls.h"
#include "sys/utsname.h"
/* uname */
#ifndef __NetBSD__
syscall1(int, uname, struct utsname *, utsname);
#endif
## Instruction:
Allow uname() with linux emulation on NetBSD
## Code After:
/* $Id$ */
/* Copyright (c) 2007 The DeforaOS Project */
#include "../syscalls.h"
#include "sys/utsname.h"
/* uname */
#if !defined(__NetBSD__) || defined(NETBSD_USE_LINUX_EMULATION)
syscall1(int, uname, struct utsname *, utsname);
#endif
|
# ... existing code ...
/* $Id$ */
/* Copyright (c) 2007 The DeforaOS Project */
# ... modified code ...
/* uname */
#if !defined(__NetBSD__) || defined(NETBSD_USE_LINUX_EMULATION)
syscall1(int, uname, struct utsname *, utsname);
#endif
# ... rest of the code ...
|
87d5cf4dc96364fb6fc213d0ba70e35b436c9e9e
|
java/squeek/veganoption/asm/ASMPlugin.java
|
java/squeek/veganoption/asm/ASMPlugin.java
|
package squeek.veganoption.asm;
import java.util.Map;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin;
@IFMLLoadingPlugin.MCVersion("1.7.10")
public class ASMPlugin implements IFMLLoadingPlugin
{
@Override
public String[] getASMTransformerClass()
{
return new String[]{ClassTransformer.class.getName()};
}
@Override
public String getModContainerClass()
{
return null;
}
@Override
public String getSetupClass()
{
return null;
}
@Override
public void injectData(Map<String, Object> data)
{
}
@Override
public String getAccessTransformerClass()
{
return null;
}
}
|
package squeek.veganoption.asm;
import java.util.Map;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin;
@IFMLLoadingPlugin.MCVersion("1.7.10")
@IFMLLoadingPlugin.TransformerExclusions("squeek.veganoption.asm")
public class ASMPlugin implements IFMLLoadingPlugin
{
@Override
public String[] getASMTransformerClass()
{
return new String[]{ClassTransformer.class.getName()};
}
@Override
public String getModContainerClass()
{
return null;
}
@Override
public String getSetupClass()
{
return null;
}
@Override
public void injectData(Map<String, Object> data)
{
}
@Override
public String getAccessTransformerClass()
{
return null;
}
}
|
Exclude asm package from being transformed
|
Exclude asm package from being transformed
|
Java
|
unlicense
|
squeek502/VeganOption,Adaptivity/VeganOption
|
java
|
## Code Before:
package squeek.veganoption.asm;
import java.util.Map;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin;
@IFMLLoadingPlugin.MCVersion("1.7.10")
public class ASMPlugin implements IFMLLoadingPlugin
{
@Override
public String[] getASMTransformerClass()
{
return new String[]{ClassTransformer.class.getName()};
}
@Override
public String getModContainerClass()
{
return null;
}
@Override
public String getSetupClass()
{
return null;
}
@Override
public void injectData(Map<String, Object> data)
{
}
@Override
public String getAccessTransformerClass()
{
return null;
}
}
## Instruction:
Exclude asm package from being transformed
## Code After:
package squeek.veganoption.asm;
import java.util.Map;
import cpw.mods.fml.relauncher.IFMLLoadingPlugin;
@IFMLLoadingPlugin.MCVersion("1.7.10")
@IFMLLoadingPlugin.TransformerExclusions("squeek.veganoption.asm")
public class ASMPlugin implements IFMLLoadingPlugin
{
@Override
public String[] getASMTransformerClass()
{
return new String[]{ClassTransformer.class.getName()};
}
@Override
public String getModContainerClass()
{
return null;
}
@Override
public String getSetupClass()
{
return null;
}
@Override
public void injectData(Map<String, Object> data)
{
}
@Override
public String getAccessTransformerClass()
{
return null;
}
}
|
...
import cpw.mods.fml.relauncher.IFMLLoadingPlugin;
@IFMLLoadingPlugin.MCVersion("1.7.10")
@IFMLLoadingPlugin.TransformerExclusions("squeek.veganoption.asm")
public class ASMPlugin implements IFMLLoadingPlugin
{
...
|
6785f6ef2287bc161085bcca7f1cb8653b88a433
|
resolwe/flow/management/commands/cleantestdir.py
|
resolwe/flow/management/commands/cleantestdir.py
|
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if volume_config["config"].get("read_only", False) == False
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if not volume_config["config"].get("read_only", False)
and volume_config["type"] == "host_path"
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
Clean only volumes of type host_path
|
Clean only volumes of type host_path
|
Python
|
apache-2.0
|
genialis/resolwe,genialis/resolwe
|
python
|
## Code Before:
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if volume_config["config"].get("read_only", False) == False
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
## Instruction:
Clean only volumes of type host_path
## Code After:
import re
import shutil
from itertools import chain
from pathlib import Path
from django.core.management.base import BaseCommand
from resolwe.storage import settings as storage_settings
from resolwe.storage.connectors import connectors
TEST_DIR_REGEX = r"^test_.*_\d+$"
class Command(BaseCommand):
"""Cleanup files created during testing."""
help = "Cleanup files created during testing."
def handle(self, *args, **kwargs):
"""Cleanup files created during testing."""
directories = [
Path(connector.path)
for connector in chain(
connectors.for_storage("data"), connectors.for_storage("upload")
)
if connector.mountable
]
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if not volume_config["config"].get("read_only", False)
and volume_config["type"] == "host_path"
]
for directory in directories:
directory = directory.resolve()
for test_dir in directory.iterdir():
if not test_dir.is_dir():
continue
if not re.match(TEST_DIR_REGEX, test_dir.name):
continue
shutil.rmtree(test_dir)
|
// ... existing code ...
directories += [
Path(volume_config["config"]["path"])
for volume_name, volume_config in storage_settings.FLOW_VOLUMES.items()
if not volume_config["config"].get("read_only", False)
and volume_config["type"] == "host_path"
]
for directory in directories:
// ... rest of the code ...
|
6bd7891e0cfcedc1a5d0813b644b5d6bb941045a
|
gaphor/abc.py
|
gaphor/abc.py
|
from __future__ import annotations
import abc
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from gaphor.core.modeling import Element
from gaphor.diagram.diagramtoolbox import ToolboxDefinition
class Service(metaclass=abc.ABCMeta):
"""Base interface for all services in Gaphor."""
@abc.abstractmethod
def shutdown(self) -> None:
"""Shutdown the services, free resources."""
class ActionProvider(metaclass=abc.ABCMeta):
"""An action provider is a special service that provides actions (see
gaphor/action.py)."""
class ModelingLanguage(metaclass=abc.ABCMeta):
"""A model provider is a special service that provides an entrypoint to a
model implementation, such as UML, SysML, RAAML."""
@abc.abstractproperty
def name(self) -> str:
"""Human readable name of the model."""
@abc.abstractproperty
def toolbox_definition(self) -> ToolboxDefinition:
"""Get structure for the toolbox."""
@abc.abstractmethod
def lookup_element(self, name: str) -> type[Element] | None:
"""Look up a model element type (class) by name."""
|
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from gaphor.core.modeling import Element
from gaphor.diagram.diagramtoolbox import ToolboxDefinition
class Service(metaclass=ABCMeta):
"""Base interface for all services in Gaphor."""
@abstractmethod
def shutdown(self) -> None:
"""Shutdown the services, free resources."""
class ActionProvider(metaclass=ABCMeta):
"""An action provider is a special service that provides actions (see
gaphor/action.py)."""
class ModelingLanguage(metaclass=ABCMeta):
"""A model provider is a special service that provides an entrypoint to a
model implementation, such as UML, SysML, RAAML."""
@property
@abstractmethod
def name(self) -> str:
"""Human readable name of the model."""
@property
@abstractmethod
def toolbox_definition(self) -> ToolboxDefinition:
"""Get structure for the toolbox."""
@abstractmethod
def lookup_element(self, name: str) -> type[Element] | None:
"""Look up a model element type (class) by name."""
|
Remove use of deprecated abstractproperty
|
Remove use of deprecated abstractproperty
Signed-off-by: Dan Yeaw <[email protected]>
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
python
|
## Code Before:
from __future__ import annotations
import abc
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from gaphor.core.modeling import Element
from gaphor.diagram.diagramtoolbox import ToolboxDefinition
class Service(metaclass=abc.ABCMeta):
"""Base interface for all services in Gaphor."""
@abc.abstractmethod
def shutdown(self) -> None:
"""Shutdown the services, free resources."""
class ActionProvider(metaclass=abc.ABCMeta):
"""An action provider is a special service that provides actions (see
gaphor/action.py)."""
class ModelingLanguage(metaclass=abc.ABCMeta):
"""A model provider is a special service that provides an entrypoint to a
model implementation, such as UML, SysML, RAAML."""
@abc.abstractproperty
def name(self) -> str:
"""Human readable name of the model."""
@abc.abstractproperty
def toolbox_definition(self) -> ToolboxDefinition:
"""Get structure for the toolbox."""
@abc.abstractmethod
def lookup_element(self, name: str) -> type[Element] | None:
"""Look up a model element type (class) by name."""
## Instruction:
Remove use of deprecated abstractproperty
Signed-off-by: Dan Yeaw <[email protected]>
## Code After:
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from gaphor.core.modeling import Element
from gaphor.diagram.diagramtoolbox import ToolboxDefinition
class Service(metaclass=ABCMeta):
"""Base interface for all services in Gaphor."""
@abstractmethod
def shutdown(self) -> None:
"""Shutdown the services, free resources."""
class ActionProvider(metaclass=ABCMeta):
"""An action provider is a special service that provides actions (see
gaphor/action.py)."""
class ModelingLanguage(metaclass=ABCMeta):
"""A model provider is a special service that provides an entrypoint to a
model implementation, such as UML, SysML, RAAML."""
@property
@abstractmethod
def name(self) -> str:
"""Human readable name of the model."""
@property
@abstractmethod
def toolbox_definition(self) -> ToolboxDefinition:
"""Get structure for the toolbox."""
@abstractmethod
def lookup_element(self, name: str) -> type[Element] | None:
"""Look up a model element type (class) by name."""
|
...
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from typing import TYPE_CHECKING
if TYPE_CHECKING:
...
from gaphor.diagram.diagramtoolbox import ToolboxDefinition
class Service(metaclass=ABCMeta):
"""Base interface for all services in Gaphor."""
@abstractmethod
def shutdown(self) -> None:
"""Shutdown the services, free resources."""
class ActionProvider(metaclass=ABCMeta):
"""An action provider is a special service that provides actions (see
gaphor/action.py)."""
class ModelingLanguage(metaclass=ABCMeta):
"""A model provider is a special service that provides an entrypoint to a
model implementation, such as UML, SysML, RAAML."""
@property
@abstractmethod
def name(self) -> str:
"""Human readable name of the model."""
@property
@abstractmethod
def toolbox_definition(self) -> ToolboxDefinition:
"""Get structure for the toolbox."""
@abstractmethod
def lookup_element(self, name: str) -> type[Element] | None:
"""Look up a model element type (class) by name."""
...
|
49bf8bd8137928a1dc5165f38f8abfe423f5e7f0
|
pi_director/controllers/user_controls.py
|
pi_director/controllers/user_controls.py
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
Create the user if it isn't already in the database first, then make it an admin.
|
Create the user if it isn't already in the database first, then make it an admin.
|
Python
|
mit
|
selfcommit/pi_director,PeterGrace/pi_director,selfcommit/pi_director,PeterGrace/pi_director,PeterGrace/pi_director,selfcommit/pi_director
|
python
|
## Code Before:
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
## Instruction:
Create the user if it isn't already in the database first, then make it an admin.
## Code After:
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
...
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
...
|
3a74774a42521f4b68e484855d103495438095c3
|
examples/schema/targetinfo.py
|
examples/schema/targetinfo.py
|
import jsl
class TargetInfo(jsl.Document):
docker = jsl.ArrayField(jsl.StringField(), max_items=2)
rsync = jsl.ArrayField(jsl.StringField(), max_items=2)
containers = jsl.ArrayField([
jsl.StringField(),
jsl.ArrayField(jsl.StringField())
])
|
import jsl
class TargetInfo(jsl.Document):
docker = jsl.ArrayField([
jsl.StringField(),
jsl.OneOfField([jsl.StringField(), jsl.NullField()])
])
rsync = jsl.ArrayField([
jsl.StringField(),
jsl.OneOfField([jsl.StringField(), jsl.NullField()])
])
containers = jsl.ArrayField([
jsl.StringField(),
jsl.ArrayField(jsl.StringField())
])
|
Correct the target info schema: docker and rsync messages are Null in case of success. Suggested by @vinzenz and corrected by @artmello.
|
Correct the target info schema: docker and rsync messages are Null in case of
success. Suggested by @vinzenz and corrected by @artmello.
|
Python
|
apache-2.0
|
leapp-to/snactor
|
python
|
## Code Before:
import jsl
class TargetInfo(jsl.Document):
docker = jsl.ArrayField(jsl.StringField(), max_items=2)
rsync = jsl.ArrayField(jsl.StringField(), max_items=2)
containers = jsl.ArrayField([
jsl.StringField(),
jsl.ArrayField(jsl.StringField())
])
## Instruction:
Correct the target info schema: docker and rsync messages are Null in case of
success. Suggested by @vinzenz and corrected by @artmello.
## Code After:
import jsl
class TargetInfo(jsl.Document):
docker = jsl.ArrayField([
jsl.StringField(),
jsl.OneOfField([jsl.StringField(), jsl.NullField()])
])
rsync = jsl.ArrayField([
jsl.StringField(),
jsl.OneOfField([jsl.StringField(), jsl.NullField()])
])
containers = jsl.ArrayField([
jsl.StringField(),
jsl.ArrayField(jsl.StringField())
])
|
// ... existing code ...
class TargetInfo(jsl.Document):
docker = jsl.ArrayField([
jsl.StringField(),
jsl.OneOfField([jsl.StringField(), jsl.NullField()])
])
rsync = jsl.ArrayField([
jsl.StringField(),
jsl.OneOfField([jsl.StringField(), jsl.NullField()])
])
containers = jsl.ArrayField([
jsl.StringField(),
jsl.ArrayField(jsl.StringField())
// ... rest of the code ...
|
5e671fe98093cf506ce1cb134c335cabd934ad84
|
aioredis/locks.py
|
aioredis/locks.py
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
Fix critical bug with patched Lock
|
Fix critical bug with patched Lock
|
Python
|
mit
|
aio-libs/aioredis,aio-libs/aioredis,ymap/aioredis
|
python
|
## Code Before:
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
## Instruction:
Fix critical bug with patched Lock
## Code After:
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
...
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
...
|
54ec1c1815f4ca6306eceac7b3297aa970962ec0
|
src/enums.h
|
src/enums.h
|
enum MidiDataRole {
MidiValueType = Qt::UserRole + 1,
MidiValueMin,
MidiValueMax
};
enum MidiType {
DefaultType,
NoteType,
ToggleType,
StringType,
ChannelType
};
#endif
|
enum MidiDataRole {
MidiValueType = Qt::UserRole + 1,
MidiValueMin,
MidiValueMax,
MidiValues
};
enum MidiType {
DefaultType,
NoteType,
ToggleType,
StringType,
ChannelType
};
#endif
|
Add custom data role MidiValues
|
Add custom data role MidiValues
|
C
|
mit
|
charlesfleche/lpd8-editor,charlesfleche/lpd8-editor
|
c
|
## Code Before:
enum MidiDataRole {
MidiValueType = Qt::UserRole + 1,
MidiValueMin,
MidiValueMax
};
enum MidiType {
DefaultType,
NoteType,
ToggleType,
StringType,
ChannelType
};
#endif
## Instruction:
Add custom data role MidiValues
## Code After:
enum MidiDataRole {
MidiValueType = Qt::UserRole + 1,
MidiValueMin,
MidiValueMax,
MidiValues
};
enum MidiType {
DefaultType,
NoteType,
ToggleType,
StringType,
ChannelType
};
#endif
|
...
enum MidiDataRole {
MidiValueType = Qt::UserRole + 1,
MidiValueMin,
MidiValueMax,
MidiValues
};
enum MidiType {
...
|
628acc65692b03485a51aca304f69c984c5db0f8
|
benchmarks/src/main/java/etomica/simulation/BenchSimCatalysis.java
|
benchmarks/src/main/java/etomica/simulation/BenchSimCatalysis.java
|
package etomica.simulation;
import etomica.modules.catalysis.Catalysis;
import etomica.space3d.Space3D;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.profile.StackProfiler;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
@Fork(1)
public class BenchSimCatalysis {
private Catalysis sim;
@Param("20")
private int nCellsZ;
@Setup
public void setup() {
sim = new Catalysis(Space3D.getInstance(), nCellsZ);
sim.integrator.reset();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Warmup(time = 3, iterations = 5)
@Measurement(time = 10, timeUnit = TimeUnit.SECONDS, iterations = 3)
public long integratorStep() {
sim.integrator.doStep();
return sim.integrator.getStepCount();
}
public static void main(String[] args) throws RunnerException {
Options opts = new OptionsBuilder()
.include(BenchSimCatalysis.class.getSimpleName())
.addProfiler(StackProfiler.class)
.build();
new Runner(opts).run();
}
}
|
package etomica.simulation;
import etomica.modules.catalysis.Catalysis;
import etomica.space3d.Space3D;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.profile.StackProfiler;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
@Fork(1)
public class BenchSimCatalysis {
private Catalysis sim;
@Param("20")
private int nCellsZ;
@Setup(Level.Iteration)
public void setup() {
sim = new Catalysis(Space3D.getInstance(), nCellsZ);
sim.integrator.reset();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Warmup(time = 3, iterations = 5)
@Measurement(time = 10, timeUnit = TimeUnit.SECONDS, iterations = 3)
public long integratorStep() {
sim.integrator.doStep();
return sim.integrator.getStepCount();
}
public static void main(String[] args) throws RunnerException {
Options opts = new OptionsBuilder()
.include(BenchSimCatalysis.class.getSimpleName())
.addProfiler(StackProfiler.class)
.build();
new Runner(opts).run();
}
}
|
Make sure Catalysis benchmark sets up between each iteration
|
Make sure Catalysis benchmark sets up between each iteration
Probably why it was so erratic.
|
Java
|
mpl-2.0
|
etomica/etomica,etomica/etomica,etomica/etomica
|
java
|
## Code Before:
package etomica.simulation;
import etomica.modules.catalysis.Catalysis;
import etomica.space3d.Space3D;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.profile.StackProfiler;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
@Fork(1)
public class BenchSimCatalysis {
private Catalysis sim;
@Param("20")
private int nCellsZ;
@Setup
public void setup() {
sim = new Catalysis(Space3D.getInstance(), nCellsZ);
sim.integrator.reset();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Warmup(time = 3, iterations = 5)
@Measurement(time = 10, timeUnit = TimeUnit.SECONDS, iterations = 3)
public long integratorStep() {
sim.integrator.doStep();
return sim.integrator.getStepCount();
}
public static void main(String[] args) throws RunnerException {
Options opts = new OptionsBuilder()
.include(BenchSimCatalysis.class.getSimpleName())
.addProfiler(StackProfiler.class)
.build();
new Runner(opts).run();
}
}
## Instruction:
Make sure Catalysis benchmark sets up between each iteration
Probably why it was so erratic.
## Code After:
package etomica.simulation;
import etomica.modules.catalysis.Catalysis;
import etomica.space3d.Space3D;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.profile.StackProfiler;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
@Fork(1)
public class BenchSimCatalysis {
private Catalysis sim;
@Param("20")
private int nCellsZ;
@Setup(Level.Iteration)
public void setup() {
sim = new Catalysis(Space3D.getInstance(), nCellsZ);
sim.integrator.reset();
}
@Benchmark
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@Warmup(time = 3, iterations = 5)
@Measurement(time = 10, timeUnit = TimeUnit.SECONDS, iterations = 3)
public long integratorStep() {
sim.integrator.doStep();
return sim.integrator.getStepCount();
}
public static void main(String[] args) throws RunnerException {
Options opts = new OptionsBuilder()
.include(BenchSimCatalysis.class.getSimpleName())
.addProfiler(StackProfiler.class)
.build();
new Runner(opts).run();
}
}
|
# ... existing code ...
@Param("20")
private int nCellsZ;
@Setup(Level.Iteration)
public void setup() {
sim = new Catalysis(Space3D.getInstance(), nCellsZ);
sim.integrator.reset();
# ... rest of the code ...
|
58dbfa0b449b8e4171c5f9cef1c15db39b52c1f0
|
tests/run_tests.py
|
tests/run_tests.py
|
import os.path
import sys
import subprocess
import unittest
tests_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.dirname(tests_dir))
import secretstorage
if __name__ == '__main__':
major, minor, patch = sys.version_info[:3]
print('Running with Python %d.%d.%d (SecretStorage from %s)' %
(major, minor, patch, os.path.dirname(secretstorage.__file__)))
mock = None
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
loader = unittest.TestLoader()
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(loader.discover(tests_dir))
if mock is not None:
mock.terminate()
sys.exit(not result.wasSuccessful())
|
import os.path
import sys
import subprocess
import unittest
tests_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.dirname(tests_dir))
import secretstorage
if __name__ == '__main__':
major, minor, patch = sys.version_info[:3]
print('Running with Python %d.%d.%d (SecretStorage from %s)' %
(major, minor, patch, os.path.dirname(secretstorage.__file__)))
mock = None
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
assert mock.stdout is not None # for mypy
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
loader = unittest.TestLoader()
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(loader.discover(tests_dir))
if mock is not None:
mock.terminate()
sys.exit(not result.wasSuccessful())
|
Add an assert to make mypy check pass again
|
Add an assert to make mypy check pass again
|
Python
|
bsd-3-clause
|
mitya57/secretstorage
|
python
|
## Code Before:
import os.path
import sys
import subprocess
import unittest
tests_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.dirname(tests_dir))
import secretstorage
if __name__ == '__main__':
major, minor, patch = sys.version_info[:3]
print('Running with Python %d.%d.%d (SecretStorage from %s)' %
(major, minor, patch, os.path.dirname(secretstorage.__file__)))
mock = None
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
loader = unittest.TestLoader()
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(loader.discover(tests_dir))
if mock is not None:
mock.terminate()
sys.exit(not result.wasSuccessful())
## Instruction:
Add an assert to make mypy check pass again
## Code After:
import os.path
import sys
import subprocess
import unittest
tests_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.dirname(tests_dir))
import secretstorage
if __name__ == '__main__':
major, minor, patch = sys.version_info[:3]
print('Running with Python %d.%d.%d (SecretStorage from %s)' %
(major, minor, patch, os.path.dirname(secretstorage.__file__)))
mock = None
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
assert mock.stdout is not None # for mypy
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
loader = unittest.TestLoader()
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(loader.discover(tests_dir))
if mock is not None:
mock.terminate()
sys.exit(not result.wasSuccessful())
|
...
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
assert mock.stdout is not None # for mypy
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
...
|
ad79f01358aa83162730b15507d7d6d3c3575ab3
|
akanda/horizon/configuration/tabs.py
|
akanda/horizon/configuration/tabs.py
|
import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
|
import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
|
Handle missing port and router data
|
Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <[email protected]>
|
Python
|
apache-2.0
|
dreamhost/akanda-horizon,dreamhost/akanda-horizon
|
python
|
## Code Before:
import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
## Instruction:
Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <[email protected]>
## Code After:
import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
|
...
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
...
|
64cb1130811c5e0e1d547ff7a3a03139b831dea5
|
openacademy/model/openacademy_session.py
|
openacademy/model/openacademy_session.py
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher"),
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
glizek/openacademy-project
|
python
|
## Code Before:
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
## Instruction:
[REF] openacademy: Add domain or and ilike
## Code After:
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher"),
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
...
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|',
("instructor", "=", True),
("category_id", "ilike", "Teacher"),
])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
...
|
e33a68f14a13c0340b2dfcbb13931d2185735951
|
scripts/nanopolish_makerange.py
|
scripts/nanopolish_makerange.py
|
from __future__ import print_function
import sys
import argparse
from Bio import SeqIO
parser = argparse.ArgumentParser(description='Partition a genome into a set of overlapping segments')
parser.add_argument('--segment-length', type=int, default=50000)
parser.add_argument('--overlap-length', type=int, default=200)
args, extra = parser.parse_known_args()
if len(extra) != 1:
sys.stderr.write("Error: a genome file is expected\n")
filename = extra[0]
recs = [ (rec.name, len(rec.seq)) for rec in SeqIO.parse(open(filename), "fasta")]
SEGMENT_LENGTH = args.segment_length
OVERLAP_LENGTH = args.overlap_length
MIN_SEGMENT_LENGTH = 5 * OVERLAP_LENGTH
for name, length in recs:
n_segments = (length / SEGMENT_LENGTH) + 1
start = 0
while start < length:
end = start + SEGMENT_LENGTH
# If this segment will end near the end of the contig, extend it to end
if length - end < MIN_SEGMENT_LENGTH:
print("%s:%d-%d" % (name, start, length - 1))
start = length
else:
print("%s:%d-%d" % (name, start, end + OVERLAP_LENGTH))
start = end
|
from __future__ import print_function
import sys
import argparse
from Bio.SeqIO.FastaIO import SimpleFastaParser
parser = argparse.ArgumentParser(description='Partition a genome into a set of overlapping segments')
parser.add_argument('--segment-length', type=int, default=50000)
parser.add_argument('--overlap-length', type=int, default=200)
args, extra = parser.parse_known_args()
if len(extra) != 1:
sys.stderr.write("Error: a genome file is expected\n")
filename = extra[0]
with open(filename) as handle:
recs = [(title.split(None, 1)[0], len(seq))
for title, seq in SimpleFastaParser(handle)]
SEGMENT_LENGTH = args.segment_length
OVERLAP_LENGTH = args.overlap_length
MIN_SEGMENT_LENGTH = 5 * OVERLAP_LENGTH
for name, length in recs:
n_segments = (length / SEGMENT_LENGTH) + 1
start = 0
while start < length:
end = start + SEGMENT_LENGTH
# If this segment will end near the end of the contig, extend it to end
if length - end < MIN_SEGMENT_LENGTH:
print("%s:%d-%d" % (name, start, length - 1))
start = length
else:
print("%s:%d-%d" % (name, start, end + OVERLAP_LENGTH))
start = end
|
Use Biopython's string based FASTA parser
|
Use Biopython's string based FASTA parser
This was introduced in Biopython 1.61 back in February 2013,
so the dependencies shouldn't matter.
You could go further here and use a generator expression
over a list comprehension?
|
Python
|
mit
|
jts/nanopolish,jts/nanopolish,jts/nanopolish,jts/nanopolish,jts/nanopolish
|
python
|
## Code Before:
from __future__ import print_function
import sys
import argparse
from Bio import SeqIO
parser = argparse.ArgumentParser(description='Partition a genome into a set of overlapping segments')
parser.add_argument('--segment-length', type=int, default=50000)
parser.add_argument('--overlap-length', type=int, default=200)
args, extra = parser.parse_known_args()
if len(extra) != 1:
sys.stderr.write("Error: a genome file is expected\n")
filename = extra[0]
recs = [ (rec.name, len(rec.seq)) for rec in SeqIO.parse(open(filename), "fasta")]
SEGMENT_LENGTH = args.segment_length
OVERLAP_LENGTH = args.overlap_length
MIN_SEGMENT_LENGTH = 5 * OVERLAP_LENGTH
for name, length in recs:
n_segments = (length / SEGMENT_LENGTH) + 1
start = 0
while start < length:
end = start + SEGMENT_LENGTH
# If this segment will end near the end of the contig, extend it to end
if length - end < MIN_SEGMENT_LENGTH:
print("%s:%d-%d" % (name, start, length - 1))
start = length
else:
print("%s:%d-%d" % (name, start, end + OVERLAP_LENGTH))
start = end
## Instruction:
Use Biopython's string based FASTA parser
This was introduced in Biopython 1.61 back in February 2013,
so the dependencies shouldn't matter.
You could go further here and use a generator expression
over a list comprehension?
## Code After:
from __future__ import print_function
import sys
import argparse
from Bio.SeqIO.FastaIO import SimpleFastaParser
parser = argparse.ArgumentParser(description='Partition a genome into a set of overlapping segments')
parser.add_argument('--segment-length', type=int, default=50000)
parser.add_argument('--overlap-length', type=int, default=200)
args, extra = parser.parse_known_args()
if len(extra) != 1:
sys.stderr.write("Error: a genome file is expected\n")
filename = extra[0]
with open(filename) as handle:
recs = [(title.split(None, 1)[0], len(seq))
for title, seq in SimpleFastaParser(handle)]
SEGMENT_LENGTH = args.segment_length
OVERLAP_LENGTH = args.overlap_length
MIN_SEGMENT_LENGTH = 5 * OVERLAP_LENGTH
for name, length in recs:
n_segments = (length / SEGMENT_LENGTH) + 1
start = 0
while start < length:
end = start + SEGMENT_LENGTH
# If this segment will end near the end of the contig, extend it to end
if length - end < MIN_SEGMENT_LENGTH:
print("%s:%d-%d" % (name, start, length - 1))
start = length
else:
print("%s:%d-%d" % (name, start, end + OVERLAP_LENGTH))
start = end
|
// ... existing code ...
import sys
import argparse
from Bio.SeqIO.FastaIO import SimpleFastaParser
parser = argparse.ArgumentParser(description='Partition a genome into a set of overlapping segments')
parser.add_argument('--segment-length', type=int, default=50000)
// ... modified code ...
sys.stderr.write("Error: a genome file is expected\n")
filename = extra[0]
with open(filename) as handle:
recs = [(title.split(None, 1)[0], len(seq))
for title, seq in SimpleFastaParser(handle)]
SEGMENT_LENGTH = args.segment_length
OVERLAP_LENGTH = args.overlap_length
// ... rest of the code ...
|
282467d21b0e84727ecddc450fcfbbdb33d851fe
|
bungee/src/main/java/com/github/games647/fastlogin/bungee/BungeeLoginSource.java
|
bungee/src/main/java/com/github/games647/fastlogin/bungee/BungeeLoginSource.java
|
package com.github.games647.fastlogin.bungee;
import com.github.games647.fastlogin.core.shared.LoginSource;
import java.net.InetSocketAddress;
import net.md_5.bungee.api.chat.TextComponent;
import net.md_5.bungee.api.connection.PendingConnection;
import net.md_5.bungee.api.event.PreLoginEvent;
public class BungeeLoginSource implements LoginSource {
private final PendingConnection connection;
private final PreLoginEvent preLoginEvent;
public BungeeLoginSource(PendingConnection connection, PreLoginEvent preLoginEvent) {
this.connection = connection;
this.preLoginEvent = preLoginEvent;
}
@Override
public void setOnlineMode() {
connection.setOnlineMode(true);
}
@Override
public void kick(String message) {
preLoginEvent.setCancelled(true);
if (message != null)
preLoginEvent.setCancelReason(TextComponent.fromLegacyText(message));
}
@Override
public InetSocketAddress getAddress() {
return connection.getAddress();
}
public PendingConnection getConnection() {
return connection;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + '{' +
"connection=" + connection +
'}';
}
}
|
package com.github.games647.fastlogin.bungee;
import com.github.games647.fastlogin.core.shared.LoginSource;
import java.net.InetSocketAddress;
import net.md_5.bungee.api.ChatColor;
import net.md_5.bungee.api.chat.ComponentBuilder;
import net.md_5.bungee.api.chat.TextComponent;
import net.md_5.bungee.api.connection.PendingConnection;
import net.md_5.bungee.api.event.PreLoginEvent;
public class BungeeLoginSource implements LoginSource {
private final PendingConnection connection;
private final PreLoginEvent preLoginEvent;
public BungeeLoginSource(PendingConnection connection, PreLoginEvent preLoginEvent) {
this.connection = connection;
this.preLoginEvent = preLoginEvent;
}
@Override
public void setOnlineMode() {
connection.setOnlineMode(true);
}
@Override
public void kick(String message) {
preLoginEvent.setCancelled(true);
if (message != null)
preLoginEvent.setCancelReason(TextComponent.fromLegacyText(message));
else
preLoginEvent.setCancelReason(new ComponentBuilder("Kicked").color(ChatColor.WHITE).create());
}
@Override
public InetSocketAddress getAddress() {
return connection.getAddress();
}
public PendingConnection getConnection() {
return connection;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + '{' +
"connection=" + connection +
'}';
}
}
|
Set a default kick reason if nothing is specified
|
Set a default kick reason if nothing is specified
|
Java
|
mit
|
AuthMe/FastLogin,games647/FastLogin,sgdc3/FastLogin
|
java
|
## Code Before:
package com.github.games647.fastlogin.bungee;
import com.github.games647.fastlogin.core.shared.LoginSource;
import java.net.InetSocketAddress;
import net.md_5.bungee.api.chat.TextComponent;
import net.md_5.bungee.api.connection.PendingConnection;
import net.md_5.bungee.api.event.PreLoginEvent;
public class BungeeLoginSource implements LoginSource {
private final PendingConnection connection;
private final PreLoginEvent preLoginEvent;
public BungeeLoginSource(PendingConnection connection, PreLoginEvent preLoginEvent) {
this.connection = connection;
this.preLoginEvent = preLoginEvent;
}
@Override
public void setOnlineMode() {
connection.setOnlineMode(true);
}
@Override
public void kick(String message) {
preLoginEvent.setCancelled(true);
if (message != null)
preLoginEvent.setCancelReason(TextComponent.fromLegacyText(message));
}
@Override
public InetSocketAddress getAddress() {
return connection.getAddress();
}
public PendingConnection getConnection() {
return connection;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + '{' +
"connection=" + connection +
'}';
}
}
## Instruction:
Set a default kick reason if nothing is specified
## Code After:
package com.github.games647.fastlogin.bungee;
import com.github.games647.fastlogin.core.shared.LoginSource;
import java.net.InetSocketAddress;
import net.md_5.bungee.api.ChatColor;
import net.md_5.bungee.api.chat.ComponentBuilder;
import net.md_5.bungee.api.chat.TextComponent;
import net.md_5.bungee.api.connection.PendingConnection;
import net.md_5.bungee.api.event.PreLoginEvent;
public class BungeeLoginSource implements LoginSource {
private final PendingConnection connection;
private final PreLoginEvent preLoginEvent;
public BungeeLoginSource(PendingConnection connection, PreLoginEvent preLoginEvent) {
this.connection = connection;
this.preLoginEvent = preLoginEvent;
}
@Override
public void setOnlineMode() {
connection.setOnlineMode(true);
}
@Override
public void kick(String message) {
preLoginEvent.setCancelled(true);
if (message != null)
preLoginEvent.setCancelReason(TextComponent.fromLegacyText(message));
else
preLoginEvent.setCancelReason(new ComponentBuilder("Kicked").color(ChatColor.WHITE).create());
}
@Override
public InetSocketAddress getAddress() {
return connection.getAddress();
}
public PendingConnection getConnection() {
return connection;
}
@Override
public String toString() {
return this.getClass().getSimpleName() + '{' +
"connection=" + connection +
'}';
}
}
|
# ... existing code ...
import java.net.InetSocketAddress;
import net.md_5.bungee.api.ChatColor;
import net.md_5.bungee.api.chat.ComponentBuilder;
import net.md_5.bungee.api.chat.TextComponent;
import net.md_5.bungee.api.connection.PendingConnection;
import net.md_5.bungee.api.event.PreLoginEvent;
# ... modified code ...
if (message != null)
preLoginEvent.setCancelReason(TextComponent.fromLegacyText(message));
else
preLoginEvent.setCancelReason(new ComponentBuilder("Kicked").color(ChatColor.WHITE).create());
}
@Override
# ... rest of the code ...
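As a rough illustration of the change recorded above, a caller may now pass null and still produce a visible disconnect reason. The listener below is a hypothetical sketch (ExampleListener and onPreLogin are illustrative names, not part of the FastLogin sources) that relies only on the standard BungeeCord event API:
import net.md_5.bungee.api.event.PreLoginEvent;
import net.md_5.bungee.api.plugin.Listener;
import net.md_5.bungee.event.EventHandler;
// Hypothetical listener, for illustration only.
public class ExampleListener implements Listener {
    @EventHandler
    public void onPreLogin(PreLoginEvent event) {
        BungeeLoginSource source = new BungeeLoginSource(event.getConnection(), event);
        // With the change above, a null message no longer leaves the cancel
        // reason unset: the white "Kicked" component is used as a fallback.
        source.kick(null);
    }
}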
|
55a6b663997390c58675f5f7e02ebeeb62601128
|
src/main/java/cloud/cluster/sim/requestsimulator/dao/SimulationStatisticsOperations.java
|
src/main/java/cloud/cluster/sim/requestsimulator/dao/SimulationStatisticsOperations.java
|
package cloud.cluster.sim.requestsimulator.dao;
import cloud.cluster.sim.requestsimulator.dto.SimulationStatistics;
import org.springframework.data.mongodb.repository.MongoRepository;
/**
* Define operations on SimulationStatistics
*/
public interface SimulationStatisticsOperations extends MongoRepository<SimulationStatistics, String> {
}
|
package cloud.cluster.sim.requestsimulator.dao;
import cloud.cluster.sim.requestsimulator.dto.SimulationStatistics;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Component;
/**
* Define operations on SimulationStatistics
*/
@Component
public interface SimulationStatisticsOperations extends MongoRepository<SimulationStatistics, String> {
}
|
Add component annotation in order to be able to auto wire component.
|
Add component annotation in order to be able to auto wire component.
|
Java
|
apache-2.0
|
IrimieBogdan/cloud-simulator,IrimieBogdan/cloud-simulator
|
java
|
## Code Before:
package cloud.cluster.sim.requestsimulator.dao;
import cloud.cluster.sim.requestsimulator.dto.SimulationStatistics;
import org.springframework.data.mongodb.repository.MongoRepository;
/**
* Define operations on SimulationStatistics
*/
public interface SimulationStatisticsOperations extends MongoRepository<SimulationStatistics, String> {
}
## Instruction:
Add component annotation in order to be able to auto wire component.
## Code After:
package cloud.cluster.sim.requestsimulator.dao;
import cloud.cluster.sim.requestsimulator.dto.SimulationStatistics;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Component;
/**
* Define operations on SimulationStatistics
*/
@Component
public interface SimulationStatisticsOperations extends MongoRepository<SimulationStatistics, String> {
}
|
# ... existing code ...
import cloud.cluster.sim.requestsimulator.dto.SimulationStatistics;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Component;
/**
* Define operations on SimulationStatistics
*/
@Component
public interface SimulationStatisticsOperations extends MongoRepository<SimulationStatistics, String> {
}
# ... rest of the code ...
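For context, the annotated repository interface can then be injected like any other bean. The service below is a hypothetical sketch (SimulationStatisticsService and countRecordedRuns are illustrative names, not from the original repository); note that Spring Data typically exposes repository interfaces as beans on its own, so the annotation mainly makes the wiring explicit:
import cloud.cluster.sim.requestsimulator.dao.SimulationStatisticsOperations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
// Hypothetical consumer of the repository, for illustration only.
@Service
public class SimulationStatisticsService {
    private final SimulationStatisticsOperations operations;
    @Autowired
    public SimulationStatisticsService(SimulationStatisticsOperations operations) {
        this.operations = operations;
    }
    public long countRecordedRuns() {
        // count() is inherited from Spring Data's CrudRepository.
        return operations.count();
    }
}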
|
793b2d601c9cafff4f4d1284da6fcc39bf1023d2
|
common/log.h
|
common/log.h
|
/**
* log.h - Header of log class
* includes 2 methods to show warrnings and errors
* @author Pavel Kryukov
* Copyright 2017 MIPT-MIPS team
*/
#ifndef LOG_H
#define LOG_H
#include <iostream>
#include <ostream>
class LogOstream
{
const bool enable;
std::ostream& stream;
public:
struct Critical { };
LogOstream(bool value, std::ostream& _out) : enable(value), stream(_out) { }
friend LogOstream& operator<<(LogOstream&, const Critical&) {
exit(-1);
}
LogOstream& operator<<(std::ostream& (*F)(std::ostream&)) {
if ( enable)
F(stream);
return *this;
}
template<typename T>
LogOstream& operator<<(const T& v) {
if ( enable) {
stream << v;
}
return *this;
}
};
class Log
{
public:
LogOstream sout;
LogOstream serr;
LogOstream::Critical critical;
Log(bool value) : sout(value, std::cout), serr(true, std::cerr), critical() { }
virtual ~Log() { }
};
#endif /* LOG_H */
|
/**
* log.h - Header of log class
* includes 2 methods to show warrnings and errors
* @author Pavel Kryukov
* Copyright 2017 MIPT-MIPS team
*/
#ifndef LOG_H
#define LOG_H
#include <iostream>
#include <ostream>
class LogOstream
{
const bool enable;
std::ostream& stream;
public:
struct Critical { };
LogOstream(bool value, std::ostream& _out) : enable(value), stream(_out) { }
friend LogOstream& operator<<(LogOstream&, const Critical&) {
exit(-1);
}
LogOstream& operator<<(std::ostream& (*F)(std::ostream&)) {
if ( enable)
F(stream);
return *this;
}
template<typename T>
LogOstream& operator<<(const T& v) {
if ( enable) {
stream << v;
}
return *this;
}
};
class Log
{
public:
mutable LogOstream sout;
mutable LogOstream serr;
const LogOstream::Critical critical;
Log(bool value) : sout(value, std::cout), serr(true, std::cerr), critical() { }
virtual ~Log() { }
};
#endif /* LOG_H */
|
Make serr and sout mutable to use them in const methods
|
Make serr and sout mutable to use them in const methods
|
C
|
mit
|
MIPT-ILab/mipt-mips,MIPT-ILab/mipt-mips,MIPT-ILab/mipt-mips-2015,gkorepanov/mipt-mips,MIPT-ILab/mipt-mips-2015,MIPT-ILab/mipt-mips-2015,MIPT-ILab/mipt-mips-2015
|
c
|
## Code Before:
/**
* log.h - Header of log class
* includes 2 methods to show warrnings and errors
* @author Pavel Kryukov
* Copyright 2017 MIPT-MIPS team
*/
#ifndef LOG_H
#define LOG_H
#include <iostream>
#include <ostream>
class LogOstream
{
const bool enable;
std::ostream& stream;
public:
struct Critical { };
LogOstream(bool value, std::ostream& _out) : enable(value), stream(_out) { }
friend LogOstream& operator<<(LogOstream&, const Critical&) {
exit(-1);
}
LogOstream& operator<<(std::ostream& (*F)(std::ostream&)) {
if ( enable)
F(stream);
return *this;
}
template<typename T>
LogOstream& operator<<(const T& v) {
if ( enable) {
stream << v;
}
return *this;
}
};
class Log
{
public:
LogOstream sout;
LogOstream serr;
LogOstream::Critical critical;
Log(bool value) : sout(value, std::cout), serr(true, std::cerr), critical() { }
virtual ~Log() { }
};
#endif /* LOG_H */
## Instruction:
Make serr and sout mutable to use them in const methods
## Code After:
/**
* log.h - Header of log class
* includes 2 methods to show warrnings and errors
* @author Pavel Kryukov
* Copyright 2017 MIPT-MIPS team
*/
#ifndef LOG_H
#define LOG_H
#include <iostream>
#include <ostream>
class LogOstream
{
const bool enable;
std::ostream& stream;
public:
struct Critical { };
LogOstream(bool value, std::ostream& _out) : enable(value), stream(_out) { }
friend LogOstream& operator<<(LogOstream&, const Critical&) {
exit(-1);
}
LogOstream& operator<<(std::ostream& (*F)(std::ostream&)) {
if ( enable)
F(stream);
return *this;
}
template<typename T>
LogOstream& operator<<(const T& v) {
if ( enable) {
stream << v;
}
return *this;
}
};
class Log
{
public:
mutable LogOstream sout;
mutable LogOstream serr;
const LogOstream::Critical critical;
Log(bool value) : sout(value, std::cout), serr(true, std::cerr), critical() { }
virtual ~Log() { }
};
#endif /* LOG_H */
|
// ... existing code ...
class Log
{
public:
mutable LogOstream sout;
mutable LogOstream serr;
const LogOstream::Critical critical;
Log(bool value) : sout(value, std::cout), serr(true, std::cerr), critical() { }
virtual ~Log() { }
// ... rest of the code ...
|