commit (string, 40-40) | old_file (string, 4-234) | new_file (string, 4-234) | old_contents (string, 10-3.01k) | new_contents (string, 19-3.38k) | subject (string, 16-736) | message (string, 17-2.63k) | lang (string, 4 classes) | license (string, 13 classes) | repos (string, 5-82.6k) | config (string, 4 classes) | content (string, 134-4.41k) | fuzzy_diff (string, 29-3.44k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
9b061d1bef24f46a1897736d9df3a5791e26736e
|
core/build.gradle.kts
|
core/build.gradle.kts
|
import org.jetbrains.registerDokkaArtifactPublication
plugins {
`maven-publish`
id("com.jfrog.bintray")
}
dependencies {
api("org.jetbrains:markdown:0.1.45")
implementation(kotlin("reflect"))
implementation("org.jsoup:jsoup:1.12.1")
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.11.1")
testImplementation(project(":testApi"))
testImplementation(kotlin("test-junit"))
}
tasks {
processResources {
val dokka_version: String by project
eachFile {
if (name == "dokka-version.properties") {
filter { line ->
line.replace("<dokka-version>", dokka_version)
}
}
}
}
}
registerDokkaArtifactPublication("dokkaCore") {
artifactId = "dokka-core"
}
|
import org.jetbrains.dokkaVersion
import org.jetbrains.registerDokkaArtifactPublication
plugins {
`maven-publish`
id("com.jfrog.bintray")
}
dependencies {
api("org.jetbrains:markdown:0.1.45")
implementation(kotlin("reflect"))
implementation("org.jsoup:jsoup:1.12.1")
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.11.1")
testImplementation(project(":testApi"))
testImplementation(kotlin("test-junit"))
}
tasks {
processResources {
inputs.property("dokkaVersion", dokkaVersion)
eachFile {
if (name == "dokka-version.properties") {
filter { line ->
line.replace("<dokka-version>", dokkaVersion)
}
}
}
}
}
registerDokkaArtifactPublication("dokkaCore") {
artifactId = "dokka-core"
}
|
Declare `dokkaVersion` as input `processResources` task in :core
|
Declare `dokkaVersion` as input `processResources` task in :core
|
Kotlin
|
apache-2.0
|
Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka,Kotlin/dokka
|
kotlin
|
## Code Before:
import org.jetbrains.registerDokkaArtifactPublication
plugins {
`maven-publish`
id("com.jfrog.bintray")
}
dependencies {
api("org.jetbrains:markdown:0.1.45")
implementation(kotlin("reflect"))
implementation("org.jsoup:jsoup:1.12.1")
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.11.1")
testImplementation(project(":testApi"))
testImplementation(kotlin("test-junit"))
}
tasks {
processResources {
val dokka_version: String by project
eachFile {
if (name == "dokka-version.properties") {
filter { line ->
line.replace("<dokka-version>", dokka_version)
}
}
}
}
}
registerDokkaArtifactPublication("dokkaCore") {
artifactId = "dokka-core"
}
## Instruction:
Declare `dokkaVersion` as input `processResources` task in :core
## Code After:
import org.jetbrains.dokkaVersion
import org.jetbrains.registerDokkaArtifactPublication
plugins {
`maven-publish`
id("com.jfrog.bintray")
}
dependencies {
api("org.jetbrains:markdown:0.1.45")
implementation(kotlin("reflect"))
implementation("org.jsoup:jsoup:1.12.1")
implementation("com.fasterxml.jackson.module:jackson-module-kotlin:2.11.1")
testImplementation(project(":testApi"))
testImplementation(kotlin("test-junit"))
}
tasks {
processResources {
inputs.property("dokkaVersion", dokkaVersion)
eachFile {
if (name == "dokka-version.properties") {
filter { line ->
line.replace("<dokka-version>", dokkaVersion)
}
}
}
}
}
registerDokkaArtifactPublication("dokkaCore") {
artifactId = "dokka-core"
}
|
...
import org.jetbrains.dokkaVersion
import org.jetbrains.registerDokkaArtifactPublication
plugins {
...
tasks {
processResources {
inputs.property("dokkaVersion", dokkaVersion)
eachFile {
if (name == "dokka-version.properties") {
filter { line ->
line.replace("<dokka-version>", dokkaVersion)
}
}
}
...
|
0c1a0a70154ddf107a6174d49793e369d28f1beb
|
openstack_dashboard/views.py
|
openstack_dashboard/views.py
|
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
from django import shortcuts
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import views
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
Fix default get_user_home with dynamic dashboards
|
Fix default get_user_home with dynamic dashboards
The existing get_user_home implementation expects both the 'admin'
and 'project' dashboards to exist and throws an exception if they
are missing. With the inclusion of configurable dashboard loading,
we can no longer count on certain dashboards being loaded.
Closes-Bug: #1293727
Change-Id: I4ee0b7b313f4e1b27c0daea829c8b38282fa78d9
|
Python
|
apache-2.0
|
bigswitch/horizon,tsufiev/horizon,froyobin/horizon,NeCTAR-RC/horizon,kfox1111/horizon,NeCTAR-RC/horizon,yjxtogo/horizon,RudoCris/horizon,watonyweng/horizon,philoniare/horizon,philoniare/horizon,noironetworks/horizon,eayunstack/horizon,CiscoSystems/avos,Dark-Hacker/horizon,zouyapeng/horizon,nvoron23/avos,mrunge/horizon,xinwu/horizon,agileblaze/OpenStackTwoFactorAuthentication,VaneCloud/horizon,mdavid/horizon,gerrive/horizon,mrunge/openstack_horizon,Daniex/horizon,openstack/horizon,orbitfp7/horizon,yeming233/horizon,kfox1111/horizon,Solinea/horizon,xinwu/horizon,damien-dg/horizon,tqtran7/horizon,mrunge/openstack_horizon,flochaz/horizon,tanglei528/horizon,doug-fish/horizon,Daniex/horizon,wolverineav/horizon,dan1/horizon-x509,saydulk/horizon,ChameleonCloud/horizon,Solinea/horizon,yeming233/horizon,anthonydillon/horizon,tellesnobrega/horizon,Tesora/tesora-horizon,wolverineav/horizon,VaneCloud/horizon,Mirantis/mos-horizon,henaras/horizon,pranavtendolkr/horizon,NCI-Cloud/horizon,eayunstack/horizon,blueboxgroup/horizon,yjxtogo/horizon,takeshineshiro/horizon,tellesnobrega/horizon,pranavtendolkr/horizon,endorphinl/horizon,tanglei528/horizon,davidcusatis/horizon,endorphinl/horizon,yjxtogo/horizon,endorphinl/horizon,anthonydillon/horizon,CiscoSystems/horizon,saydulk/horizon,Metaswitch/horizon,mrunge/openstack_horizon,mdavid/horizon,idjaw/horizon,watonyweng/horizon,sandvine/horizon,maestro-hybrid-cloud/horizon,django-leonardo/horizon,pranavtendolkr/horizon,VaneCloud/horizon,izadorozhna/dashboard_integration_tests,henaras/horizon,karthik-suresh/horizon,kfox1111/horizon,CiscoSystems/avos,saydulk/horizon,coreycb/horizon,eayunstack/horizon,dan1/horizon-x509,j4/horizon,maestro-hybrid-cloud/horizon,FNST-OpenStack/horizon,aaronorosen/horizon-congress,xinwu/horizon,VaneCloud/horizon,Hodorable/0602,mandeepdhami/horizon,newrocknj/horizon,NCI-Cloud/horizon,aaronorosen/horizon-congress,kfox1111/horizon,endorphinl/horizon-fork,redhat-cip/horizon,CiscoSystems/avos,j4/horizon,watonyweng/horizon,FNST-OpenStack/horizon,bac/horizon,NCI-Cloud/horizon,endorphinl/horizon,RudoCris/horizon,orbitfp7/horizon,coreycb/horizon,saydulk/horizon,luhanhan/horizon,dan1/horizon-proto,henaras/horizon,luhanhan/horizon,tqtran7/horizon,tellesnobrega/horizon,doug-fish/horizon,ging/horizon,mrunge/horizon_lib,vladryk/horizon,Dark-Hacker/horizon,Metaswitch/horizon,CiscoSystems/horizon,icloudrnd/automation_tools,sandvine/horizon,luhanhan/horizon,idjaw/horizon,flochaz/horizon,NCI-Cloud/horizon,JioCloud/horizon,damien-dg/horizon,django-leonardo/horizon,luhanhan/horizon,flochaz/horizon,wolverineav/horizon,FNST-OpenStack/horizon,takeshineshiro/horizon,Tesora/tesora-horizon,CiscoSystems/horizon,promptworks/horizon,xme1226/horizon,zouyapeng/horizon,django-leonardo/horizon,nvoron23/avos,Dark-Hacker/horizon,endorphinl/horizon-fork,BiznetGIO/horizon,mandeepdhami/horizon,Metaswitch/horizon,redhat-cip/horizon,coreycb/horizon,noironetworks/horizon,Daniex/horizon,Dark-Hacker/horizon,ChameleonCloud/horizon,mdavid/horizon,tsufiev/horizon,yeming233/horizon,CiscoSystems/horizon,JioCloud/horizon,xme1226/horizon,idjaw/horizon,bigswitch/horizon,ging/horizon,Metaswitch/horizon,BiznetGIO/horizon,karthik-suresh/horizon,mdavid/horizon,Daniex/horizon,philoniare/horizon,vladryk/horizon,froyobin/horizon,django-leonardo/horizon,agileblaze/OpenStackTwoFactorAuthentication,Solinea/horizon,mandeepdhami/horizon,newrocknj/horizon,wangxiangyu/horizon,philoniare/horizon,anthonydillon/horizon,gerrive/horizon,openstack/horizon,redhat-openstack/horizon,doug-fish/horizon,Mirantis/mos
-horizon,nvoron23/avos,vladryk/horizon,endorphinl/horizon-fork,liyitest/rr,zouyapeng/horizon,openstack/horizon,icloudrnd/automation_tools,liyitest/rr,tqtran7/horizon,Mirantis/mos-horizon,j4/horizon,yjxtogo/horizon,wolverineav/horizon,ging/horizon,zouyapeng/horizon,henaras/horizon,promptworks/horizon,promptworks/horizon,endorphinl/horizon-fork,promptworks/horizon,blueboxgroup/horizon,blueboxgroup/horizon,tsufiev/horizon,blueboxgroup/horizon,BiznetGIO/horizon,ChameleonCloud/horizon,yeming233/horizon,Mirantis/mos-horizon,noironetworks/horizon,tqtran7/horizon,davidcusatis/horizon,bac/horizon,redhat-cip/horizon,froyobin/horizon,pranavtendolkr/horizon,liyitest/rr,newrocknj/horizon,redhat-openstack/horizon,tsufiev/horizon,Tesora/tesora-horizon,anthonydillon/horizon,wangxiangyu/horizon,dan1/horizon-x509,davidcusatis/horizon,bigswitch/horizon,redhat-openstack/horizon,dan1/horizon-proto,karthik-suresh/horizon,wangxiangyu/horizon,doug-fish/horizon,ging/horizon,JioCloud/horizon,watonyweng/horizon,takeshineshiro/horizon,sandvine/horizon,damien-dg/horizon,NeCTAR-RC/horizon,davidcusatis/horizon,mandeepdhami/horizon,bigswitch/horizon,aaronorosen/horizon-congress,karthik-suresh/horizon,idjaw/horizon,izadorozhna/dashboard_integration_tests,mrunge/horizon_lib,vladryk/horizon,Hodorable/0602,agileblaze/OpenStackTwoFactorAuthentication,xme1226/horizon,orbitfp7/horizon,BiznetGIO/horizon,mrunge/horizon_lib,noironetworks/horizon,gerrive/horizon,Hodorable/0602,agileblaze/OpenStackTwoFactorAuthentication,takeshineshiro/horizon,dan1/horizon-proto,maestro-hybrid-cloud/horizon,CiscoSystems/avos,damien-dg/horizon,sandvine/horizon,mrunge/horizon,FNST-OpenStack/horizon,wangxiangyu/horizon,Solinea/horizon,newrocknj/horizon,dan1/horizon-x509,j4/horizon,RudoCris/horizon,redhat-openstack/horizon,coreycb/horizon,orbitfp7/horizon,gerrive/horizon,mrunge/horizon,RudoCris/horizon,dan1/horizon-proto,tanglei528/horizon,openstack/horizon,icloudrnd/automation_tools,flochaz/horizon,nvoron23/avos,icloudrnd/automation_tools,NeCTAR-RC/horizon,bac/horizon,ChameleonCloud/horizon,redhat-cip/horizon,Hodorable/0602,maestro-hybrid-cloud/horizon,liyitest/rr,bac/horizon,Tesora/tesora-horizon,tellesnobrega/horizon,xinwu/horizon
|
python
|
## Code Before:
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
## Instruction:
Fix default get_user_home with dynamic dashboards
The existing get_user_home implementation expects both the 'admin'
and 'project' dashboards to exist and throws an exception if they
are missing. With the inclusion of configurable dashboard loading,
we can no longer count on certain dashboards being loaded.
Closes-Bug: #1293727
Change-Id: I4ee0b7b313f4e1b27c0daea829c8b38282fa78d9
## Code After:
from django import shortcuts
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import views
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
...
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import views
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
...
|
f5d4da9fa71dbb59a9459e376fde8840037bf39a
|
account_banking_sepa_credit_transfer/__init__.py
|
account_banking_sepa_credit_transfer/__init__.py
|
from . import wizard
from . import models
|
from . import wizard
|
Remove import models from init in sepa_credit_transfer
|
Remove import models from init in sepa_credit_transfer
|
Python
|
agpl-3.0
|
open-synergy/bank-payment,sergio-incaser/bank-payment,hbrunn/bank-payment,sergio-teruel/bank-payment,ndtran/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,sergiocorato/bank-payment,damdam-s/bank-payment,CompassionCH/bank-payment,CompassionCH/bank-payment,incaser/bank-payment,Antiun/bank-payment,sergio-teruel/bank-payment,damdam-s/bank-payment,syci/bank-payment,sergio-incaser/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,Antiun/bank-payment,sergiocorato/bank-payment,ndtran/bank-payment,acsone/bank-payment,syci/bank-payment,diagramsoftware/bank-payment
|
python
|
## Code Before:
from . import wizard
from . import models
## Instruction:
Remove import models from init in sepa_credit_transfer
## Code After:
from . import wizard
|
# ... existing code ...
from . import wizard
# ... rest of the code ...
|
2fc8c076a763832043ac46ca6b163e2c66bed996
|
src/scenarios/bank_account_debit_order/bank_account_debit_order.java
|
src/scenarios/bank_account_debit_order/bank_account_debit_order.java
|
package bank_account_debit_order;
import com.balancedpayments.Balanced;
import com.balancedpayments.BankAccount;
import com.balancedpayments.Debit;
import com.balancedpayments.Order;
import com.balancedpayments.errors.HTTPError;
import com.balancedpayments.errors.MultipleResultsFound;
import com.balancedpayments.errors.NoResultsFound;
import java.util.HashMap;
public class bank_account_debit_order {
public static void main(String[] args) throws HTTPError, NoResultsFound, MultipleResultsFound {
Balanced.configure("ak-test-25ZY8HQwZPuQtDecrxb671LilUya5t5G0");
BankAccount bankAccount = new BankAccount("/bank_accounts/BA17zYxBNrmg9isvicjz9Ae4");
Order order = new Order("/orders/OR5sl2RJVnbwEf45nq5eATdz");
HashMap<String, Object> payload = new HashMap<String, Object>();
payload.put("amount", 5000);
try {
Debit debit = order.debitFrom(bankAccount, payload);
}
catch (HTTPError e) {}
}
}
|
package bank_account_debit_order;
import com.balancedpayments.*;
import com.balancedpayments.errors.*;
import java.util.HashMap;
import java.util.Map;
public class bank_account_debit_order {
public static void main(String[] args) throws HTTPError, NoResultsFound, MultipleResultsFound {
Balanced.configure("ak-test-25ZY8HQwZPuQtDecrxb671LilUya5t5G0");
BankAccount bankAccount = new BankAccount("/bank_accounts/BA17zYxBNrmg9isvicjz9Ae4");
Order order = new Order("/orders/OR5sl2RJVnbwEf45nq5eATdz");
HashMap<String, Object> payload = new HashMap<String, Object>();
payload.put("amount", 5000);
try {
Debit debit = order.debitFrom(bankAccount, payload);
}
catch (HTTPError e) {}
}
}
|
Edit bank account debit order imports
|
Edit bank account debit order imports
|
Java
|
mit
|
balanced/balanced-java
|
java
|
## Code Before:
package bank_account_debit_order;
import com.balancedpayments.Balanced;
import com.balancedpayments.BankAccount;
import com.balancedpayments.Debit;
import com.balancedpayments.Order;
import com.balancedpayments.errors.HTTPError;
import com.balancedpayments.errors.MultipleResultsFound;
import com.balancedpayments.errors.NoResultsFound;
import java.util.HashMap;
public class bank_account_debit_order {
public static void main(String[] args) throws HTTPError, NoResultsFound, MultipleResultsFound {
Balanced.configure("ak-test-25ZY8HQwZPuQtDecrxb671LilUya5t5G0");
BankAccount bankAccount = new BankAccount("/bank_accounts/BA17zYxBNrmg9isvicjz9Ae4");
Order order = new Order("/orders/OR5sl2RJVnbwEf45nq5eATdz");
HashMap<String, Object> payload = new HashMap<String, Object>();
payload.put("amount", 5000);
try {
Debit debit = order.debitFrom(bankAccount, payload);
}
catch (HTTPError e) {}
}
}
## Instruction:
Edit bank account debit order imports
## Code After:
package bank_account_debit_order;
import com.balancedpayments.*;
import com.balancedpayments.errors.*;
import java.util.HashMap;
import java.util.Map;
public class bank_account_debit_order {
public static void main(String[] args) throws HTTPError, NoResultsFound, MultipleResultsFound {
Balanced.configure("ak-test-25ZY8HQwZPuQtDecrxb671LilUya5t5G0");
BankAccount bankAccount = new BankAccount("/bank_accounts/BA17zYxBNrmg9isvicjz9Ae4");
Order order = new Order("/orders/OR5sl2RJVnbwEf45nq5eATdz");
HashMap<String, Object> payload = new HashMap<String, Object>();
payload.put("amount", 5000);
try {
Debit debit = order.debitFrom(bankAccount, payload);
}
catch (HTTPError e) {}
}
}
|
// ... existing code ...
package bank_account_debit_order;
import com.balancedpayments.*;
import com.balancedpayments.errors.*;
import java.util.HashMap;
import java.util.Map;
public class bank_account_debit_order {
// ... rest of the code ...
|
75cb98c88859d38eeec10121fdaee1ff9e4be18b
|
src/main/java/normalization/Normalization.java
|
src/main/java/normalization/Normalization.java
|
/**
*
*/
package normalization;
import datastructures.AttributeJoint;
import datastructures.DFJoint;
import dependency.ADependency;
import dependency.FunctionalDependency;
/**
* @author Pavel Nichita
*
*/
public final class Normalization {
/**
* Calculates all attributes that are being implied by {@code attrJoint} in
* {@code dfJoint}.
*
* The algorithm used is Ullman.
*
* @param attrJoint Attribute joint to who'm calculate ullman.
* @param dfJoint Where to calculate.
* @return an attribute joint with all the attributes implied.
*/
public static AttributeJoint simpleUllman(AttributeJoint attrJoint, DFJoint dfJoint){
AttributeJoint result = new AttributeJoint(attrJoint);
boolean isChanged;
do {
isChanged = false;
for (ADependency df:dfJoint) {
if (df.getClass() == new FunctionalDependency().getClass())
if (df.getAntecedent().isContained(result) && !(df.getConsequent().isContained(result))) {
result.addAttributes(df.getConsequent());
isChanged = true;
}
}
} while (isChanged);
return result;
}
}
|
/**
*
*/
package normalization;
import datastructures.AttributeJoint;
import datastructures.DFJoint;
import dependency.ADependency;
import dependency.FunctionalDependency;
/**
* @author Pavel Nichita
*
*/
public final class Normalization {
private Normalization() {
// Private constructor to hide the implicit public one.
}
/**
* Calculates all attributes that are being implied by {@code attrJoint} in
* {@code dfJoint}.
*
* The algorithm used is Ullman.
*
* @param attrJoint Attribute joint to who'm calculate ullman.
* @param dfJoint Where to calculate.
* @return an attribute joint with all the attributes implied.
*/
public static AttributeJoint simpleUllman(AttributeJoint attrJoint, DFJoint dfJoint){
AttributeJoint result = new AttributeJoint(attrJoint);
boolean isChanged;
do {
isChanged = false;
for (ADependency df:dfJoint) {
if (df.getClass() == new FunctionalDependency().getClass()) {
if (df.getAntecedent().isContained(result) && !(df.getConsequent().isContained(result))) {
result.addAttributes(df.getConsequent());
isChanged = true;
}
}
}
} while (isChanged);
return result;
}
}
|
Fix SonarQube issues: not all of them.
|
Fix SonarQube issues: not all of them.
|
Java
|
mpl-2.0
|
nichitapavel/project
|
java
|
## Code Before:
/**
*
*/
package normalization;
import datastructures.AttributeJoint;
import datastructures.DFJoint;
import dependency.ADependency;
import dependency.FunctionalDependency;
/**
* @author Pavel Nichita
*
*/
public final class Normalization {
/**
* Calculates all attributes that are being implied by {@code attrJoint} in
* {@code dfJoint}.
*
* The algorithm used is Ullman.
*
* @param attrJoint Attribute joint to who'm calculate ullman.
* @param dfJoint Where to calculate.
* @return an attribute joint with all the attributes implied.
*/
public static AttributeJoint simpleUllman(AttributeJoint attrJoint, DFJoint dfJoint){
AttributeJoint result = new AttributeJoint(attrJoint);
boolean isChanged;
do {
isChanged = false;
for (ADependency df:dfJoint) {
if (df.getClass() == new FunctionalDependency().getClass())
if (df.getAntecedent().isContained(result) && !(df.getConsequent().isContained(result))) {
result.addAttributes(df.getConsequent());
isChanged = true;
}
}
} while (isChanged);
return result;
}
}
## Instruction:
Fix SonarQube issues: not all of them.
## Code After:
/**
*
*/
package normalization;
import datastructures.AttributeJoint;
import datastructures.DFJoint;
import dependency.ADependency;
import dependency.FunctionalDependency;
/**
* @author Pavel Nichita
*
*/
public final class Normalization {
private Normalization() {
// Private constructor to hide the implicit public one.
}
/**
* Calculates all attributes that are being implied by {@code attrJoint} in
* {@code dfJoint}.
*
* The algorithm used is Ullman.
*
* @param attrJoint Attribute joint to who'm calculate ullman.
* @param dfJoint Where to calculate.
* @return an attribute joint with all the attributes implied.
*/
public static AttributeJoint simpleUllman(AttributeJoint attrJoint, DFJoint dfJoint){
AttributeJoint result = new AttributeJoint(attrJoint);
boolean isChanged;
do {
isChanged = false;
for (ADependency df:dfJoint) {
if (df.getClass() == new FunctionalDependency().getClass()) {
if (df.getAntecedent().isContained(result) && !(df.getConsequent().isContained(result))) {
result.addAttributes(df.getConsequent());
isChanged = true;
}
}
}
} while (isChanged);
return result;
}
}
|
# ... existing code ...
*
*/
public final class Normalization {
private Normalization() {
// Private constructor to hide the implicit public one.
}
/**
* Calculates all attributes that are being implied by {@code attrJoint} in
# ... modified code ...
do {
isChanged = false;
for (ADependency df:dfJoint) {
if (df.getClass() == new FunctionalDependency().getClass()) {
if (df.getAntecedent().isContained(result) && !(df.getConsequent().isContained(result))) {
result.addAttributes(df.getConsequent());
isChanged = true;
}
}
}
} while (isChanged);
# ... rest of the code ...
|
e60563e28ce08a850809aef696a348c84359ece2
|
gore/tests/test_api.py
|
gore/tests/test_api.py
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
Add search to events API
|
Add search to events API
|
Python
|
mit
|
akx/gentry,akx/gentry,akx/gentry,akx/gentry
|
python
|
## Code Before:
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
## Instruction:
Add search to events API
## Code After:
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
# ... existing code ...
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
# ... rest of the code ...
|
f36a1bb6c9229615d1cc498c02fb7df066e7cd1c
|
app/main/views/_templates.py
|
app/main/views/_templates.py
|
templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
|
templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
Make SMS templates plausible for hack day
|
Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one without
|
Python
|
mit
|
alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin
|
python
|
## Code Before:
templates = [
{
'type': 'sms',
'name': 'Confirmation',
'body': 'Lasting power of attorney: We’ve received your application. Applications take between 8 and 10 weeks to process.' # noqa
},
{
'type': 'sms',
'name': 'Reminder',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) expires on ((date)). Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'sms',
'name': 'Warning',
'body': 'Vehicle tax: Your vehicle tax for ((registration number)) has expired. Tax your vehicle at www.gov.uk/vehicle-tax' # noqa
},
{
'type': 'email',
'name': 'Application alert 06/2016',
'subject': 'Your lasting power of attorney application',
'body': """Dear ((name)),
When you’ve made your lasting power of attorney (LPA), you need to register it \
with the Office of the Public Guardian (OPG).
You can apply to register your LPA yourself if you’re able to make your own decisions.
Your attorney can also register it for you. You’ll be told if they do and you can \
object to the registration.
It takes between 8 and 10 weeks to register an LPA if there are no mistakes in the application.
"""
},
{
'type': 'sms',
'name': 'Air quality alert',
'body': 'Air pollution levels will be ((level)) in ((region)) tomorrow.'
},
]
## Instruction:
Make SMS templates plausible for hack day
This commit replaces the previous SMS templates.
I’ve written a couple of new ones which are plausible for developers on the
hack day:
- one with placeholders
- one without
## Code After:
templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
|
...
templates = [
{
'type': 'sms',
'name': 'Confirmation with details Jan 2016',
'body': '((name)), we’ve received your ((thing)). We’ll contact you again within 1 week.'
},
{
'type': 'sms',
'name': 'Confirmation Jan 2016',
'body': 'We’ve received your payment. We’ll contact you again within 1 week.'
}
]
...
|
8daf5c8402a981942165d62ccb6057a26ad73012
|
cms/tests/fixture_loading.py
|
cms/tests/fixture_loading.py
|
import tempfile
import codecs
try:
from cStringIO import StringIO
except:
from io import StringIO
from django.core.management import call_command
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.models import Page
class FixtureTestCase(NavextendersFixture, SettingsOverrideTestCase):
def test_fixture_load(self):
"""
This test dumps a live set of pages, cleanup the database and load it
again.
This makes fixtures unnecessary and it's easier to maintain.
"""
output = StringIO()
dump = tempfile.mkstemp(".json")
call_command('dumpdata', 'cms', indent=3, stdout=output)
Page.objects.all().delete()
output.seek(0)
with codecs.open(dump[1], 'w', 'utf-8') as dumpfile:
dumpfile.write(output.read())
self.assertEqual(0, Page.objects.count())
# Transaction disable, otherwise the connection it the test would be
# isolated from the data loaded in the different command connection
call_command('loaddata', dump[1], commit=False, stdout=output)
self.assertEqual(10, Page.objects.count())
|
import tempfile
import codecs
try:
from cStringIO import StringIO
except:
from io import StringIO
from django.core.management import call_command
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.models import Page, Placeholder, CMSPlugin
class FixtureTestCase(NavextendersFixture, SettingsOverrideTestCase):
def test_fixture_load(self):
"""
This test dumps a live set of pages, cleanup the database and load it
again.
This makes fixtures unnecessary and it's easier to maintain.
"""
output = StringIO()
dump = tempfile.mkstemp(".json")
call_command('dumpdata', 'cms', indent=3, stdout=output)
original_ph = Placeholder.objects.count()
original_pages = Page.objects.count()
original_plugins = CMSPlugin.objects.count()
Page.objects.all().delete()
output.seek(0)
with codecs.open(dump[1], 'w', 'utf-8') as dumpfile:
dumpfile.write(output.read())
self.assertEqual(0, Page.objects.count())
self.assertEqual(0, Placeholder.objects.count())
# Transaction disable, otherwise the connection it the test would be
# isolated from the data loaded in the different command connection
call_command('loaddata', dump[1], commit=False, stdout=output)
self.assertEqual(10, Page.objects.count())
self.assertEqual(original_pages, Page.objects.count())
# Placeholder number may differ if signals does not correctly handle
# load data command
self.assertEqual(original_ph, Placeholder.objects.count())
self.assertEqual(original_plugins, CMSPlugin.objects.count())
|
Change test_fixture_load to check for rescanned placeholders
|
Change test_fixture_load to check for rescanned placeholders
|
Python
|
bsd-3-clause
|
stefanw/django-cms,bittner/django-cms,robmagee/django-cms,jsma/django-cms,jeffreylu9/django-cms,AlexProfi/django-cms,Jaccorot/django-cms,yakky/django-cms,memnonila/django-cms,intgr/django-cms,andyzsf/django-cms,intip/django-cms,leture/django-cms,jrief/django-cms,SachaMPS/django-cms,SofiaReis/django-cms,Livefyre/django-cms,liuyisiyisi/django-cms,ScholzVolkmer/django-cms,datakortet/django-cms,divio/django-cms,FinalAngel/django-cms,iddqd1/django-cms,MagicSolutions/django-cms,wuzhihui1123/django-cms,intip/django-cms,datakortet/django-cms,DylannCordel/django-cms,netzkolchose/django-cms,evildmp/django-cms,saintbird/django-cms,iddqd1/django-cms,liuyisiyisi/django-cms,isotoma/django-cms,owers19856/django-cms,divio/django-cms,360youlun/django-cms,leture/django-cms,donce/django-cms,intgr/django-cms,memnonila/django-cms,astagi/django-cms,wuzhihui1123/django-cms,dhorelik/django-cms,rryan/django-cms,divio/django-cms,vad/django-cms,iddqd1/django-cms,stefanfoulis/django-cms,mkoistinen/django-cms,keimlink/django-cms,SachaMPS/django-cms,selecsosi/django-cms,mkoistinen/django-cms,wuzhihui1123/django-cms,Vegasvikk/django-cms,cyberintruder/django-cms,frnhr/django-cms,rsalmaso/django-cms,SmithsonianEnterprises/django-cms,stefanw/django-cms,czpython/django-cms,sznekol/django-cms,takeshineshiro/django-cms,jsma/django-cms,wyg3958/django-cms,vstoykov/django-cms,qnub/django-cms,rsalmaso/django-cms,youprofit/django-cms,saintbird/django-cms,rryan/django-cms,Vegasvikk/django-cms,rsalmaso/django-cms,stefanfoulis/django-cms,rscnt/django-cms,petecummings/django-cms,donce/django-cms,datakortet/django-cms,benzkji/django-cms,benzkji/django-cms,dhorelik/django-cms,irudayarajisawa/django-cms,sephii/django-cms,jproffitt/django-cms,isotoma/django-cms,czpython/django-cms,timgraham/django-cms,qnub/django-cms,youprofit/django-cms,FinalAngel/django-cms,chmberl/django-cms,philippze/django-cms,chkir/django-cms,isotoma/django-cms,Jaccorot/django-cms,cyberintruder/django-cms,takeshineshiro/django-cms,vad/django-cms,selecsosi/django-cms,rscnt/django-cms,jrief/django-cms,robmagee/django-cms,farhaadila/django-cms,sephii/django-cms,qnub/django-cms,DylannCordel/django-cms,jproffitt/django-cms,Vegasvikk/django-cms,jrief/django-cms,keimlink/django-cms,chmberl/django-cms,AlexProfi/django-cms,yakky/django-cms,frnhr/django-cms,chmberl/django-cms,360youlun/django-cms,selecsosi/django-cms,vad/django-cms,irudayarajisawa/django-cms,jeffreylu9/django-cms,SmithsonianEnterprises/django-cms,selecsosi/django-cms,farhaadila/django-cms,Livefyre/django-cms,vxsx/django-cms,benzkji/django-cms,andyzsf/django-cms,dhorelik/django-cms,bittner/django-cms,MagicSolutions/django-cms,netzkolchose/django-cms,donce/django-cms,astagi/django-cms,frnhr/django-cms,rryan/django-cms,cyberintruder/django-cms,chkir/django-cms,360youlun/django-cms,SofiaReis/django-cms,nostalgiaz/django-cms,chkir/django-cms,timgraham/django-cms,webu/django-cms,vad/django-cms,bittner/django-cms,jrclaramunt/django-cms,netzkolchose/django-cms,frnhr/django-cms,vstoykov/django-cms,philippze/django-cms,Livefyre/django-cms,rryan/django-cms,astagi/django-cms,netzkolchose/django-cms,ScholzVolkmer/django-cms,kk9599/django-cms,divio/django-cms,bittner/django-cms,sephii/django-cms,nostalgiaz/django-cms,memnonila/django-cms,evildmp/django-cms,sznekol/django-cms,takeshineshiro/django-cms,czpython/django-cms,leture/django-cms,webu/django-cms,benzkji/django-cms,intip/django-cms,petecummings/django-cms,josjevv/django-cms,youprofit/django-cms,nimbis/django-cms,SofiaReis/django-cms,ScholzVolkmer/django-cms,rsalmaso/
django-cms,jeffreylu9/django-cms,mkoistinen/django-cms,nimbis/django-cms,Jaccorot/django-cms,intgr/django-cms,evildmp/django-cms,andyzsf/django-cms,stefanw/django-cms,jproffitt/django-cms,josjevv/django-cms,intgr/django-cms,Livefyre/django-cms,farhaadila/django-cms,josjevv/django-cms,jrclaramunt/django-cms,sephii/django-cms,SachaMPS/django-cms,stefanfoulis/django-cms,robmagee/django-cms,wyg3958/django-cms,mkoistinen/django-cms,datakortet/django-cms,wyg3958/django-cms,jrief/django-cms,jsma/django-cms,SmithsonianEnterprises/django-cms,philippze/django-cms,DylannCordel/django-cms,stefanfoulis/django-cms,jeffreylu9/django-cms,vxsx/django-cms,liuyisiyisi/django-cms,sznekol/django-cms,owers19856/django-cms,kk9599/django-cms,vstoykov/django-cms,stefanw/django-cms,andyzsf/django-cms,jrclaramunt/django-cms,irudayarajisawa/django-cms,evildmp/django-cms,czpython/django-cms,jproffitt/django-cms,nostalgiaz/django-cms,AlexProfi/django-cms,nimbis/django-cms,isotoma/django-cms,FinalAngel/django-cms,vxsx/django-cms,wuzhihui1123/django-cms,kk9599/django-cms,nostalgiaz/django-cms,FinalAngel/django-cms,MagicSolutions/django-cms,jsma/django-cms,vxsx/django-cms,yakky/django-cms,intip/django-cms,saintbird/django-cms,owers19856/django-cms,nimbis/django-cms,webu/django-cms,petecummings/django-cms,rscnt/django-cms,yakky/django-cms,keimlink/django-cms,timgraham/django-cms
|
python
|
## Code Before:
import tempfile
import codecs
try:
from cStringIO import StringIO
except:
from io import StringIO
from django.core.management import call_command
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.models import Page
class FixtureTestCase(NavextendersFixture, SettingsOverrideTestCase):
def test_fixture_load(self):
"""
This test dumps a live set of pages, cleanup the database and load it
again.
This makes fixtures unnecessary and it's easier to maintain.
"""
output = StringIO()
dump = tempfile.mkstemp(".json")
call_command('dumpdata', 'cms', indent=3, stdout=output)
Page.objects.all().delete()
output.seek(0)
with codecs.open(dump[1], 'w', 'utf-8') as dumpfile:
dumpfile.write(output.read())
self.assertEqual(0, Page.objects.count())
# Transaction disable, otherwise the connection it the test would be
# isolated from the data loaded in the different command connection
call_command('loaddata', dump[1], commit=False, stdout=output)
self.assertEqual(10, Page.objects.count())
## Instruction:
Change test_fixture_load to check for rescanned placeholders
## Code After:
import tempfile
import codecs
try:
from cStringIO import StringIO
except:
from io import StringIO
from django.core.management import call_command
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.models import Page, Placeholder, CMSPlugin
class FixtureTestCase(NavextendersFixture, SettingsOverrideTestCase):
def test_fixture_load(self):
"""
This test dumps a live set of pages, cleanup the database and load it
again.
This makes fixtures unnecessary and it's easier to maintain.
"""
output = StringIO()
dump = tempfile.mkstemp(".json")
call_command('dumpdata', 'cms', indent=3, stdout=output)
original_ph = Placeholder.objects.count()
original_pages = Page.objects.count()
original_plugins = CMSPlugin.objects.count()
Page.objects.all().delete()
output.seek(0)
with codecs.open(dump[1], 'w', 'utf-8') as dumpfile:
dumpfile.write(output.read())
self.assertEqual(0, Page.objects.count())
self.assertEqual(0, Placeholder.objects.count())
# Transaction disable, otherwise the connection it the test would be
# isolated from the data loaded in the different command connection
call_command('loaddata', dump[1], commit=False, stdout=output)
self.assertEqual(10, Page.objects.count())
self.assertEqual(original_pages, Page.objects.count())
# Placeholder number may differ if signals does not correctly handle
# load data command
self.assertEqual(original_ph, Placeholder.objects.count())
self.assertEqual(original_plugins, CMSPlugin.objects.count())
|
# ... existing code ...
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import SettingsOverrideTestCase
from cms.models import Page, Placeholder, CMSPlugin
class FixtureTestCase(NavextendersFixture, SettingsOverrideTestCase):
# ... modified code ...
output = StringIO()
dump = tempfile.mkstemp(".json")
call_command('dumpdata', 'cms', indent=3, stdout=output)
original_ph = Placeholder.objects.count()
original_pages = Page.objects.count()
original_plugins = CMSPlugin.objects.count()
Page.objects.all().delete()
output.seek(0)
with codecs.open(dump[1], 'w', 'utf-8') as dumpfile:
...
dumpfile.write(output.read())
self.assertEqual(0, Page.objects.count())
self.assertEqual(0, Placeholder.objects.count())
# Transaction disable, otherwise the connection it the test would be
# isolated from the data loaded in the different command connection
call_command('loaddata', dump[1], commit=False, stdout=output)
self.assertEqual(10, Page.objects.count())
self.assertEqual(original_pages, Page.objects.count())
# Placeholder number may differ if signals does not correctly handle
# load data command
self.assertEqual(original_ph, Placeholder.objects.count())
self.assertEqual(original_plugins, CMSPlugin.objects.count())
# ... rest of the code ...
|
05ecac15aa9b975bd83378daa0560182041bef97
|
src/format_go_correspondance.py
|
src/format_go_correspondance.py
|
import sys
import os
import argparse
import re
def format_go_correspondance(args):
go = {}
with open(args.go_correspondance_input, "r") as go_correspondance_input:
for line in go_correspondance_input.readlines():
split_line = line[:-1].split('\t')
go_name = split_line[0]
if split_line[1] == "":
continue
slim_go_names = split_line[1].split(';')
for split_go_name in slim_go_names:
go.setdefault(split_go_name,[]).append(go_name)
with open(args.go_correspondance_output,"w") as go_correspondance_output:
for go_name in go:
go_correspondance_output.write(go_name + '\t')
go_correspondance_output.write("\t".join(go[go_name]) + "\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--go_correspondance_input', required=True)
parser.add_argument('--go_correspondance_output', required=True)
args = parser.parse_args()
format_go_correspondance(args)
|
import sys
import os
import argparse
import re
def format_go_correspondance(args):
go = {}
with open(args.go_correspondance_input, "r") as go_correspondance_input:
for line in go_correspondance_input.readlines():
if not line.startswith('GO'):
continue
print line
split_line = line[:-1].split('\t')
go_name = split_line[0]
if split_line[1] == "":
continue
slim_go_names = split_line[1].split(';')
for split_go_name in slim_go_names:
go.setdefault(split_go_name,[]).append(go_name)
with open(args.go_correspondance_output,"w") as go_correspondance_output:
for go_name in go:
go_correspondance_output.write(go_name + '\t')
go_correspondance_output.write("\t".join(go[go_name]) + "\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--go_correspondance_input', required=True)
parser.add_argument('--go_correspondance_output', required=True)
args = parser.parse_args()
format_go_correspondance(args)
|
Add test for go correspondance file begin
|
Add test for go correspondance file begin
|
Python
|
apache-2.0
|
ASaiM/group_humann2_uniref_abundances_to_GO,ASaiM/group_humann2_uniref_abundances_to_GO
|
python
|
## Code Before:
import sys
import os
import argparse
import re
def format_go_correspondance(args):
go = {}
with open(args.go_correspondance_input, "r") as go_correspondance_input:
for line in go_correspondance_input.readlines():
split_line = line[:-1].split('\t')
go_name = split_line[0]
if split_line[1] == "":
continue
slim_go_names = split_line[1].split(';')
for split_go_name in slim_go_names:
go.setdefault(split_go_name,[]).append(go_name)
with open(args.go_correspondance_output,"w") as go_correspondance_output:
for go_name in go:
go_correspondance_output.write(go_name + '\t')
go_correspondance_output.write("\t".join(go[go_name]) + "\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--go_correspondance_input', required=True)
parser.add_argument('--go_correspondance_output', required=True)
args = parser.parse_args()
format_go_correspondance(args)
## Instruction:
Add test for go correspondance file begin
## Code After:
import sys
import os
import argparse
import re
def format_go_correspondance(args):
go = {}
with open(args.go_correspondance_input, "r") as go_correspondance_input:
for line in go_correspondance_input.readlines():
if not line.startswith('GO'):
continue
print line
split_line = line[:-1].split('\t')
go_name = split_line[0]
if split_line[1] == "":
continue
slim_go_names = split_line[1].split(';')
for split_go_name in slim_go_names:
go.setdefault(split_go_name,[]).append(go_name)
with open(args.go_correspondance_output,"w") as go_correspondance_output:
for go_name in go:
go_correspondance_output.write(go_name + '\t')
go_correspondance_output.write("\t".join(go[go_name]) + "\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--go_correspondance_input', required=True)
parser.add_argument('--go_correspondance_output', required=True)
args = parser.parse_args()
format_go_correspondance(args)
|
# ... existing code ...
go = {}
with open(args.go_correspondance_input, "r") as go_correspondance_input:
for line in go_correspondance_input.readlines():
if not line.startswith('GO'):
continue
print line
split_line = line[:-1].split('\t')
go_name = split_line[0]
# ... rest of the code ...
|
17d61aa9fbf084546dea94a6acf59713a869362b
|
OpenRP/Core/src/main/java/pnb/orp/services/CharacterCreationService.java
|
OpenRP/Core/src/main/java/pnb/orp/services/CharacterCreationService.java
|
package pnb.orp.services;
import java.util.UUID;
public interface CharacterCreationService {
void createCharacter(UUID player, String cardName);
//How to add items to the character in an extensible way.
void finishCharacter(UUID player);
void cancelCharacter(UUID player);
}
|
package pnb.orp.services;
import java.util.UUID;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import pnb.utils.services.DatabaseService;
@Singleton
public class CharacterCreationService {
private final DatabaseService dbs;
@Inject
public CharacterCreationService(DatabaseService dbs) {
this.dbs = dbs;
}
void createCharacter(UUID player, String cardName) {
}
//How to add items to the character in an extensible way.
void finishCharacter(UUID player) {
}
void cancelCharacter(UUID player) {
}
}
|
Convert Character Creation Service from an interface to a class.
|
Convert Character Creation Service from an interface to a class.
|
Java
|
mit
|
Roxforbraynz/mods,Roxforbraynz/mods
|
java
|
## Code Before:
package pnb.orp.services;
import java.util.UUID;
public interface CharacterCreationService {
void createCharacter(UUID player, String cardName);
//How to add items to the character in an extensible way.
void finishCharacter(UUID player);
void cancelCharacter(UUID player);
}
## Instruction:
Convert Character Creation Service from an interface to a class.
## Code After:
package pnb.orp.services;
import java.util.UUID;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import pnb.utils.services.DatabaseService;
@Singleton
public class CharacterCreationService {
private final DatabaseService dbs;
@Inject
public CharacterCreationService(DatabaseService dbs) {
this.dbs = dbs;
}
void createCharacter(UUID player, String cardName) {
}
//How to add items to the character in an extensible way.
void finishCharacter(UUID player) {
}
void cancelCharacter(UUID player) {
}
}
|
// ... existing code ...
import java.util.UUID;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import pnb.utils.services.DatabaseService;
@Singleton
public class CharacterCreationService {
private final DatabaseService dbs;
@Inject
public CharacterCreationService(DatabaseService dbs) {
this.dbs = dbs;
}
void createCharacter(UUID player, String cardName) {
}
//How to add items to the character in an extensible way.
void finishCharacter(UUID player) {
}
void cancelCharacter(UUID player) {
}
}
// ... rest of the code ...
|
dec3aaaefe2afdf4d3ce19dc808257ea49cc2b00
|
hsml.py
|
hsml.py
|
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
|
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpys without cbrts
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
Fix for old numpy versions without cbrt
|
Fix for old numpy versions without cbrt
|
Python
|
mit
|
sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra
|
python
|
## Code Before:
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
return radius
## Instruction:
Fix for old numpy versions without cbrt
## Code After:
"""A small module for computing the smoothing length of a Gadget/Arepo simulation."""
import numpy as np
def get_smooth_length(bar):
"""Figures out if the particles are from AREPO or GADGET
and computes the smoothing length.
Note the Volume array in HDF5 is comoving and this returns a comoving smoothing length
The SPH kernel definition used in Gadget (Price 2011: arxiv 1012.1885)
gives a normalisation so that rho_p = m_p / h^3
So the smoothing length for Arepo is Volume^{1/3}
For gadget the kernel is defined so that the smoothing length is 2*h.
Arguments:
Baryon particles from a simulation
Returns:
Array of smoothing lengths in code units.
"""
#Are we arepo? If we are a modern version we should have this array.
try:
radius = np.cbrt(bar["Volume"], dtype=np.float32)
except KeyError:
#If we don't have a Volume array we are gadget, and
#the SmoothingLength array is actually the smoothing length.
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpys without cbrts
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
|
...
#There is a different kernel definition, as in gadget the kernel goes from 0 to 2,
#whereas I put it between zero and 1.
radius=np.array(bar["SmoothingLength"],dtype=np.float32)/2
except AttributeError:
#This is for really old numpys without cbrts
radius = np.power(bar["Volume"], 1./3, dtype=np.float32)
return radius
...
|
7b72dbb331c120eb5657ce9a81e725c550779485
|
dataportal/broker/__init__.py
|
dataportal/broker/__init__.py
|
from .simple_broker import _DataBrokerClass, EventQueue, Header
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton
register_builtin_handlers()
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton
register_builtin_handlers()
|
Add Errors to the public API.
|
DOC: Add Errors to the public API.
|
Python
|
bsd-3-clause
|
danielballan/dataportal,ericdill/datamuxer,tacaswell/dataportal,ericdill/datamuxer,tacaswell/dataportal,NSLS-II/dataportal,danielballan/datamuxer,danielballan/datamuxer,ericdill/databroker,NSLS-II/datamuxer,danielballan/dataportal,NSLS-II/dataportal,ericdill/databroker
|
python
|
## Code Before:
from .simple_broker import _DataBrokerClass, EventQueue, Header
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton
register_builtin_handlers()
## Instruction:
DOC: Add Errors to the public API.
## Code After:
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton
register_builtin_handlers()
|
// ... existing code ...
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton
// ... rest of the code ...
|
8cab1d360218f6d8075bad08fd38ef90c75e5549
|
turbustat/tests/setup_package.py
|
turbustat/tests/setup_package.py
|
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
|
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
|
Add coveragerc to package data
|
Add coveragerc to package data
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
python
|
## Code Before:
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz']
}
## Instruction:
Add coveragerc to package data
## Code After:
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
|
// ... existing code ...
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['data/*.fits', 'data/*.npz',
'coveragerc']
}
// ... rest of the code ...
|
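`get_package_data` here is the astropy-helpers hook for declaring non-Python files; the plain setuptools equivalent is the `package_data` argument to `setup()`. A hedged sketch of that equivalent (project name and layout are placeholders, not the real turbustat setup):

from setuptools import setup, find_packages

setup(
    name="turbustat-example",                  # placeholder name, for illustration only
    packages=find_packages(),
    package_data={
        "turbustat.tests": ["data/*.fits", "data/*.npz", "coveragerc"],
    },
)

Running it needs a command, e.g. `python setup.py sdist`, after which the listed `data/*` files and `coveragerc` travel with the built distribution.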
6f7890c8b29670f613b6a551ebac2b383f3a7a64
|
tests/test_recipes.py
|
tests/test_recipes.py
|
import unittest
from brew.constants import IMPERIAL_UNITS
from brew.constants import SI_UNITS
from brew.recipes import Recipe
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
class TestRecipe(unittest.TestCase):
def setUp(self):
# Define Grains
self.grain_additions = grain_additions
# Define Hops
self.hop_additions = hop_additions
# Define Recipes
self.recipe = recipe
def test_str(self):
out = str(self.recipe)
self.assertEquals(out, 'pale ale')
def test_set_units(self):
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
self.recipe.set_units(SI_UNITS)
self.assertEquals(self.recipe.units, SI_UNITS)
def test_set_raises(self):
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_validate(self):
data = self.recipe.to_dict()
Recipe.validate(data)
|
import unittest
from brew.constants import IMPERIAL_UNITS
from brew.constants import SI_UNITS
from brew.recipes import Recipe
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
from fixtures import yeast
class TestRecipe(unittest.TestCase):
def setUp(self):
# Define Grains
self.grain_additions = grain_additions
# Define Hops
self.hop_additions = hop_additions
# Define Yeast
self.yeast = yeast
# Define Recipes
self.recipe = recipe
def test_str(self):
out = str(self.recipe)
self.assertEquals(out, 'pale ale')
def test_set_units(self):
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
self.recipe.set_units(SI_UNITS)
self.assertEquals(self.recipe.units, SI_UNITS)
def test_set_raises(self):
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_grains_units_mismatch_raises(self):
grain_additions = [g.change_units() for g in self.grain_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=grain_additions,
hop_additions=self.hop_additions,
yeast=self.yeast)
def test_hops_units_mismatch_raises(self):
hop_additions = [h.change_units() for h in self.hop_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=self.grain_additions,
hop_additions=hop_additions,
yeast=self.yeast)
def test_validate(self):
data = self.recipe.to_dict()
Recipe.validate(data)
|
Test units mismatch in recipe
|
Test units mismatch in recipe
|
Python
|
mit
|
chrisgilmerproj/brewday,chrisgilmerproj/brewday
|
python
|
## Code Before:
import unittest
from brew.constants import IMPERIAL_UNITS
from brew.constants import SI_UNITS
from brew.recipes import Recipe
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
class TestRecipe(unittest.TestCase):
def setUp(self):
# Define Grains
self.grain_additions = grain_additions
# Define Hops
self.hop_additions = hop_additions
# Define Recipes
self.recipe = recipe
def test_str(self):
out = str(self.recipe)
self.assertEquals(out, 'pale ale')
def test_set_units(self):
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
self.recipe.set_units(SI_UNITS)
self.assertEquals(self.recipe.units, SI_UNITS)
def test_set_raises(self):
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_validate(self):
data = self.recipe.to_dict()
Recipe.validate(data)
## Instruction:
Test units mismatch in recipe
## Code After:
import unittest
from brew.constants import IMPERIAL_UNITS
from brew.constants import SI_UNITS
from brew.recipes import Recipe
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
from fixtures import yeast
class TestRecipe(unittest.TestCase):
def setUp(self):
# Define Grains
self.grain_additions = grain_additions
# Define Hops
self.hop_additions = hop_additions
# Define Yeast
self.yeast = yeast
# Define Recipes
self.recipe = recipe
def test_str(self):
out = str(self.recipe)
self.assertEquals(out, 'pale ale')
def test_set_units(self):
self.assertEquals(self.recipe.units, IMPERIAL_UNITS)
self.recipe.set_units(SI_UNITS)
self.assertEquals(self.recipe.units, SI_UNITS)
def test_set_raises(self):
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_grains_units_mismatch_raises(self):
grain_additions = [g.change_units() for g in self.grain_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=grain_additions,
hop_additions=self.hop_additions,
yeast=self.yeast)
def test_hops_units_mismatch_raises(self):
hop_additions = [h.change_units() for h in self.hop_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=self.grain_additions,
hop_additions=hop_additions,
yeast=self.yeast)
def test_validate(self):
data = self.recipe.to_dict()
Recipe.validate(data)
|
// ... existing code ...
from fixtures import grain_additions
from fixtures import hop_additions
from fixtures import recipe
from fixtures import yeast
class TestRecipe(unittest.TestCase):
// ... modified code ...
# Define Hops
self.hop_additions = hop_additions
# Define Yeast
self.yeast = yeast
# Define Recipes
self.recipe = recipe
...
with self.assertRaises(Exception):
self.recipe.set_units('bad')
def test_grains_units_mismatch_raises(self):
grain_additions = [g.change_units() for g in self.grain_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=grain_additions,
hop_additions=self.hop_additions,
yeast=self.yeast)
def test_hops_units_mismatch_raises(self):
hop_additions = [h.change_units() for h in self.hop_additions]
with self.assertRaises(Exception):
Recipe(name='pale ale',
grain_additions=self.grain_additions,
hop_additions=hop_additions,
yeast=self.yeast)
def test_validate(self):
data = self.recipe.to_dict()
Recipe.validate(data)
// ... rest of the code ...
|
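Both new tests lean on `change_units()` flipping a grain or hop addition into the other unit system, so the `Recipe` constructor sees a mix and raises. A framework-free sketch of that kind of consistency check — the names are illustrative, not the brew API:

IMPERIAL, SI = "imperial", "si"

def check_same_units(*ingredient_groups):
    systems = {units for group in ingredient_groups for units in group}
    if len(systems) > 1:
        raise ValueError("mixed unit systems: {}".format(sorted(systems)))

check_same_units([IMPERIAL, IMPERIAL], [IMPERIAL])    # fine
try:
    check_same_units([IMPERIAL], [SI])                # grains vs hops disagree
except ValueError as exc:
    print(exc)                                        # mixed unit systems: ['imperial', 'si']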
12b46a902f1596c0559e6e7d3faf6ea7b812a800
|
api/radar_api/tests/conftest.py
|
api/radar_api/tests/conftest.py
|
import string
import random
import pytest
from radar_api.app import create_app
from radar.database import db
@pytest.fixture(scope='session')
def app():
return create_app({
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost'
})
@pytest.yield_fixture(scope='session')
def app_context(app):
with app.app_context() as app_context:
yield app_context
@pytest.fixture(scope='session')
def test_db(request, app_context):
db.drop_all()
db.create_all()
def teardown():
db.drop_all()
request.addfinalizer(teardown)
return db
@pytest.fixture
def transaction(request, app_context, test_db):
db.session.begin_nested()
def teardown():
db.session.rollback()
request.addfinalizer(teardown)
return db
@pytest.yield_fixture
def client(app, app_context):
with app.test_client() as client:
yield client
|
import string
import random
import pytest
from radar_api.app import create_app
from radar.database import db
@pytest.fixture(scope='session')
def app():
return create_app({
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost',
'UKRDC_PATIENT_SEARCH_URL': 'http://localhost:5101/search',
})
@pytest.yield_fixture(scope='session')
def app_context(app):
with app.app_context() as app_context:
yield app_context
@pytest.fixture(scope='session')
def test_db(request, app_context):
db.drop_all()
db.create_all()
def teardown():
db.drop_all()
request.addfinalizer(teardown)
return db
@pytest.fixture
def transaction(request, app_context, test_db):
db.session.begin_nested()
def teardown():
db.session.rollback()
request.addfinalizer(teardown)
return db
@pytest.yield_fixture
def client(app, app_context):
with app.test_client() as client:
yield client
|
Add UKRDC_PATIENT_SEARCH_URL to test app config
|
Add UKRDC_PATIENT_SEARCH_URL to test app config
|
Python
|
agpl-3.0
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
python
|
## Code Before:
import string
import random
import pytest
from radar_api.app import create_app
from radar.database import db
@pytest.fixture(scope='session')
def app():
return create_app({
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost'
})
@pytest.yield_fixture(scope='session')
def app_context(app):
with app.app_context() as app_context:
yield app_context
@pytest.fixture(scope='session')
def test_db(request, app_context):
db.drop_all()
db.create_all()
def teardown():
db.drop_all()
request.addfinalizer(teardown)
return db
@pytest.fixture
def transaction(request, app_context, test_db):
db.session.begin_nested()
def teardown():
db.session.rollback()
request.addfinalizer(teardown)
return db
@pytest.yield_fixture
def client(app, app_context):
with app.test_client() as client:
yield client
## Instruction:
Add UKRDC_PATIENT_SEARCH_URL to test app config
## Code After:
import string
import random
import pytest
from radar_api.app import create_app
from radar.database import db
@pytest.fixture(scope='session')
def app():
return create_app({
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost',
'UKRDC_PATIENT_SEARCH_URL': 'http://localhost:5101/search',
})
@pytest.yield_fixture(scope='session')
def app_context(app):
with app.app_context() as app_context:
yield app_context
@pytest.fixture(scope='session')
def test_db(request, app_context):
db.drop_all()
db.create_all()
def teardown():
db.drop_all()
request.addfinalizer(teardown)
return db
@pytest.fixture
def transaction(request, app_context, test_db):
db.session.begin_nested()
def teardown():
db.session.rollback()
request.addfinalizer(teardown)
return db
@pytest.yield_fixture
def client(app, app_context):
with app.test_client() as client:
yield client
|
...
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost',
'UKRDC_PATIENT_SEARCH_URL': 'http://localhost:5101/search',
})
...
|
237b66c8b9cef714b64a75b1f20a79a4357c71b5
|
apps/continiousauth/serializers.py
|
apps/continiousauth/serializers.py
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
|
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
Change serializer to omit dates
|
Change serializer to omit dates
|
Python
|
mit
|
larserikgk/mobiauth-server,larserikgk/mobiauth-server,larserikgk/mobiauth-server
|
python
|
## Code Before:
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag', 'start_time', 'end_time')
## Instruction:
Change serializer to omit dates
## Code After:
from rest_framework import serializers
from .models import AuthenticationSession
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
|
// ... existing code ...
class AuthenticationSessionSerializer(serializers.ModelSerializer):
class Meta:
model = AuthenticationSession
fields = ('application', 'external_session_id', 'session_photo_bytes', 'flag')
// ... rest of the code ...
|
8f1555a3d805da66d1cd70c2f68725756679f0fc
|
src/info/ata4/disunity/cli/DisUnityOptions.java
|
src/info/ata4/disunity/cli/DisUnityOptions.java
|
/*
** 2013 August 11
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.disunity.cli;
import com.beust.jcommander.Parameter;
/**
* DisUnity configuration class.
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
public class DisUnityOptions {
@Parameter(
names = {"-h", "--help"},
description = "Print this help.",
help = true
)
private boolean help;
@Parameter(
names = { "-v", "--verbose" },
description = "Show more verbose log output."
)
private boolean verbose;
@Parameter(
names = { "-f", "--output-format" },
description = "Set output text format."
)
private OutputFormat outputFormat;
public boolean isHelp() {
return help;
}
public boolean isVerbose() {
return verbose;
}
public OutputFormat getOutputFormat() {
return outputFormat;
}
}
|
/*
** 2013 August 11
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.disunity.cli;
import com.beust.jcommander.Parameter;
/**
* DisUnity configuration class.
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
public class DisUnityOptions {
@Parameter(
names = {"-h", "--help"},
description = "Print this help.",
help = true
)
private boolean help;
@Parameter(
names = { "-v", "--verbose" },
description = "Show more verbose log output."
)
private boolean verbose;
@Parameter(
names = { "-f", "--output-format" },
description = "Set output text format."
)
private OutputFormat outputFormat = OutputFormat.PLAINTEXT;
public boolean isHelp() {
return help;
}
public boolean isVerbose() {
return verbose;
}
public OutputFormat getOutputFormat() {
return outputFormat;
}
}
|
Set default output format to plain text
|
Set default output format to plain text
|
Java
|
unlicense
|
summertriangle-dev/disunity,catinred2/disunity,tomagoyaky/disunity,zodsoft/disunity,summertriangle-dev/disunity,bolabola/disunity,yclhehe/disunity,zhangf911/disunity,zchen39/disunity,catinred2/disunity,ata4/disunity,yclhehe/disunity,tomagoyaky/disunity,zchen39/disunity,bolabola/disunity,hozmaster/disunity,hozmaster/disunity,kennytm/disunity,KitoHo/disunity,v2tmobile/disunity,v2tmobile/disunity,zhangf911/disunity,ata4/disunity,kennytm/disunity,zodsoft/disunity,KitoHo/disunity
|
java
|
## Code Before:
/*
** 2013 August 11
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.disunity.cli;
import com.beust.jcommander.Parameter;
/**
* DisUnity configuration class.
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
public class DisUnityOptions {
@Parameter(
names = {"-h", "--help"},
description = "Print this help.",
help = true
)
private boolean help;
@Parameter(
names = { "-v", "--verbose" },
description = "Show more verbose log output."
)
private boolean verbose;
@Parameter(
names = { "-f", "--output-format" },
description = "Set output text format."
)
private OutputFormat outputFormat;
public boolean isHelp() {
return help;
}
public boolean isVerbose() {
return verbose;
}
public OutputFormat getOutputFormat() {
return outputFormat;
}
}
## Instruction:
Set default output format to plain text
## Code After:
/*
** 2013 August 11
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.disunity.cli;
import com.beust.jcommander.Parameter;
/**
* DisUnity configuration class.
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
public class DisUnityOptions {
@Parameter(
names = {"-h", "--help"},
description = "Print this help.",
help = true
)
private boolean help;
@Parameter(
names = { "-v", "--verbose" },
description = "Show more verbose log output."
)
private boolean verbose;
@Parameter(
names = { "-f", "--output-format" },
description = "Set output text format."
)
private OutputFormat outputFormat = OutputFormat.PLAINTEXT;
public boolean isHelp() {
return help;
}
public boolean isVerbose() {
return verbose;
}
public OutputFormat getOutputFormat() {
return outputFormat;
}
}
|
// ... existing code ...
names = { "-f", "--output-format" },
description = "Set output text format."
)
private OutputFormat outputFormat = OutputFormat.PLAINTEXT;
public boolean isHelp() {
return help;
// ... rest of the code ...
|
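The only change is giving `--output-format` a default instead of leaving the field null when the flag is omitted. The same idea expressed with Python's argparse, purely as a cross-language illustration (the choice values are invented):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-f", "--output-format",
                    choices=("plaintext", "json"),
                    default="plaintext")                  # mirrors OutputFormat.PLAINTEXT
print(parser.parse_args([]).output_format)                # plaintext
print(parser.parse_args(["-f", "json"]).output_format)    # json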
76e766998984da126de4cb6121c07690fbdeeba1
|
pystil/data/graph/line.py
|
pystil/data/graph/line.py
|
"""Treat line data"""
from pystil.data.utils import make_time_serie, on, between
from pystil.db import db, count, Visit, distinct
def process_data(site, graph, criteria, from_date, to_date, step, stamp, lang):
rq = (db.session
.query(Visit.day.label("key"),
count(distinct(Visit.uuid)).label("count")
if criteria == 'unique' else count(1).label("count"))
.filter(on(site))
.filter(between(from_date, to_date)))
if criteria == 'new':
rq = rq.filter(Visit.last_visit == None)
results = rq.group_by(Visit.day).order_by(Visit.day).all()
return make_time_serie(results, criteria, from_date, to_date, lang)
|
"""Treat line data"""
from pystil.data.utils import make_time_serie, on, between
from pystil.db import db, count, Visit, distinct
def process_data(site, graph, criteria, from_date, to_date, step, stamp, lang):
rq = (db.session
.query(Visit.day.label("key"),
count(distinct(Visit.uuid)).label("count")
if criteria in ('unique', 'new') else count(1).label("count"))
.filter(on(site))
.filter(between(from_date, to_date)))
if criteria == 'new':
rq = rq.filter(Visit.last_visit == None)
results = rq.group_by(Visit.day).order_by(Visit.day).all()
return make_time_serie(results, criteria, from_date, to_date, lang)
|
Fix the new/unique visits (at least!)
|
Fix the new/unique visits (at least!)
|
Python
|
bsd-3-clause
|
Kozea/pystil,Kozea/pystil,Kozea/pystil,Kozea/pystil,Kozea/pystil
|
python
|
## Code Before:
"""Treat line data"""
from pystil.data.utils import make_time_serie, on, between
from pystil.db import db, count, Visit, distinct
def process_data(site, graph, criteria, from_date, to_date, step, stamp, lang):
rq = (db.session
.query(Visit.day.label("key"),
count(distinct(Visit.uuid)).label("count")
if criteria == 'unique' else count(1).label("count"))
.filter(on(site))
.filter(between(from_date, to_date)))
if criteria == 'new':
rq = rq.filter(Visit.last_visit == None)
results = rq.group_by(Visit.day).order_by(Visit.day).all()
return make_time_serie(results, criteria, from_date, to_date, lang)
## Instruction:
Fix the new/unique visits (at least!)
## Code After:
"""Treat line data"""
from pystil.data.utils import make_time_serie, on, between
from pystil.db import db, count, Visit, distinct
def process_data(site, graph, criteria, from_date, to_date, step, stamp, lang):
rq = (db.session
.query(Visit.day.label("key"),
count(distinct(Visit.uuid)).label("count")
if criteria in ('unique', 'new') else count(1).label("count"))
.filter(on(site))
.filter(between(from_date, to_date)))
if criteria == 'new':
rq = rq.filter(Visit.last_visit == None)
results = rq.group_by(Visit.day).order_by(Visit.day).all()
return make_time_serie(results, criteria, from_date, to_date, lang)
|
# ... existing code ...
rq = (db.session
.query(Visit.day.label("key"),
count(distinct(Visit.uuid)).label("count")
if criteria in ('unique', 'new') else count(1).label("count"))
.filter(on(site))
.filter(between(from_date, to_date)))
# ... rest of the code ...
|
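The fix widens the `COUNT(DISTINCT uuid)` branch to cover `'new'` as well, since counting new visitors only makes sense per uuid. A standalone sketch of how the criteria flips the aggregate, rendering SQL without touching a database (assumes SQLAlchemy 1.4+ style `select()`):

from sqlalchemy import Column, Date, MetaData, String, Table, distinct, func, select

visit = Table("visit", MetaData(),
              Column("day", Date), Column("uuid", String))

def day_counts(criteria):
    agg = (func.count(distinct(visit.c.uuid))
           if criteria in ("unique", "new")
           else func.count(1))
    return select(visit.c.day, agg.label("count")).group_by(visit.c.day)

print(day_counts("new"))   # ... count(DISTINCT visit.uuid) AS count ...
print(day_counts("all"))   # ... count(:count_1) AS count ...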
28078cb22cde99650f39669819f683ba7cda5f2a
|
mockui-find/src/main/java/com/autonomy/abc/selenium/find/filters/ParametricFilterNode.java
|
mockui-find/src/main/java/com/autonomy/abc/selenium/find/filters/ParametricFilterNode.java
|
package com.autonomy.abc.selenium.find.filters;
import com.hp.autonomy.frontend.selenium.util.ElementUtil;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import java.util.List;
import static com.sun.xml.internal.ws.spi.db.BindingContextFactory.LOGGER;
public class ParametricFilterNode extends FilterNode {
ParametricFilterNode(WebElement element, WebDriver webDriver) {
super(element, webDriver);
}
public List<WebElement> getChildren(){
return getContainer().findElements(By.className("parametric-value-name"));
}
@Override
public List<String> getChildNames() {
return ElementUtil.getTexts(getChildren());
}
public List<WebElement> getChildDocCount(){
return getContainer().findElements(By.className("parametric-value-count"));
}
public List<WebElement> getFullChildrenElements(){
return getContainer().findElements(By.className("parametric-value-element"));
}
public int getTotalDocNumber(){
int total=0;
for(WebElement element:getChildDocCount()){
//gets text, trims brackets and casts to int
total+=Integer.parseInt(element.getText().replaceAll("[()]",""));
}
return total;
}
}
|
package com.autonomy.abc.selenium.find.filters;
import com.hp.autonomy.frontend.selenium.util.ElementUtil;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import java.util.List;
public class ParametricFilterNode extends FilterNode {
ParametricFilterNode(WebElement element, WebDriver webDriver) {
super(element, webDriver);
}
public List<WebElement> getChildren(){
return getContainer().findElements(By.className("parametric-value-name"));
}
@Override
public List<String> getChildNames() {
return ElementUtil.getTexts(getChildren());
}
public List<WebElement> getChildDocCount(){
return getContainer().findElements(By.className("parametric-value-count"));
}
public List<WebElement> getFullChildrenElements(){
return getContainer().findElements(By.className("parametric-value-element"));
}
public int getTotalDocNumber(){
int total=0;
for(WebElement element:getChildDocCount()){
//gets text, trims brackets and casts to int
total+=Integer.parseInt(element.getText().replaceAll("[()]",""));
}
return total;
}
}
|
Remove bad import to fix build
|
Remove bad import to fix build
|
Java
|
mit
|
hpautonomy/find,hpautonomy/find,hpe-idol/find,hpe-idol/find,hpe-idol/find,hpautonomy/find,hpe-idol/java-powerpoint-report,hpe-idol/find,hpe-idol/java-powerpoint-report,hpautonomy/find,hpe-idol/find,hpautonomy/find
|
java
|
## Code Before:
package com.autonomy.abc.selenium.find.filters;
import com.hp.autonomy.frontend.selenium.util.ElementUtil;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import java.util.List;
import static com.sun.xml.internal.ws.spi.db.BindingContextFactory.LOGGER;
public class ParametricFilterNode extends FilterNode {
ParametricFilterNode(WebElement element, WebDriver webDriver) {
super(element, webDriver);
}
public List<WebElement> getChildren(){
return getContainer().findElements(By.className("parametric-value-name"));
}
@Override
public List<String> getChildNames() {
return ElementUtil.getTexts(getChildren());
}
public List<WebElement> getChildDocCount(){
return getContainer().findElements(By.className("parametric-value-count"));
}
public List<WebElement> getFullChildrenElements(){
return getContainer().findElements(By.className("parametric-value-element"));
}
public int getTotalDocNumber(){
int total=0;
for(WebElement element:getChildDocCount()){
//gets text, trims brackets and casts to int
total+=Integer.parseInt(element.getText().replaceAll("[()]",""));
}
return total;
}
}
## Instruction:
Remove bad import to fix build
## Code After:
package com.autonomy.abc.selenium.find.filters;
import com.hp.autonomy.frontend.selenium.util.ElementUtil;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import java.util.List;
public class ParametricFilterNode extends FilterNode {
ParametricFilterNode(WebElement element, WebDriver webDriver) {
super(element, webDriver);
}
public List<WebElement> getChildren(){
return getContainer().findElements(By.className("parametric-value-name"));
}
@Override
public List<String> getChildNames() {
return ElementUtil.getTexts(getChildren());
}
public List<WebElement> getChildDocCount(){
return getContainer().findElements(By.className("parametric-value-count"));
}
public List<WebElement> getFullChildrenElements(){
return getContainer().findElements(By.className("parametric-value-element"));
}
public int getTotalDocNumber(){
int total=0;
for(WebElement element:getChildDocCount()){
//gets text, trims brackets and casts to int
total+=Integer.parseInt(element.getText().replaceAll("[()]",""));
}
return total;
}
}
|
...
import org.openqa.selenium.WebElement;
import java.util.List;
public class ParametricFilterNode extends FilterNode {
...
|
0cd5deefc61f56351af24f6597a1509ea4b4b567
|
settings.py
|
settings.py
|
import os
INTERVAL = int(os.environ.get('INTERVAL', 60))
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-2')
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
LOG_FILE = 'rightnowalerts.log'
|
import os
INTERVAL = int(os.environ.get('INTERVAL', 60))
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-2')
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOG_FILE = os.environ.get('LOG_FILE', BASE_DIR + '/rightnowalerts.log')
|
Read log file from ENV and add full path for default
|
Read log file from ENV and add full path for default
|
Python
|
mit
|
lorden/right-now-alerts
|
python
|
## Code Before:
import os
INTERVAL = int(os.environ.get('INTERVAL', 60))
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-2')
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
LOG_FILE = 'rightnowalerts.log'
## Instruction:
Read log file from ENV and add full path for default
## Code After:
import os
INTERVAL = int(os.environ.get('INTERVAL', 60))
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_REGION = os.environ.get('AWS_REGION', 'us-west-2')
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOG_FILE = os.environ.get('LOG_FILE', BASE_DIR + '/rightnowalerts.log')
|
# ... existing code ...
ALERTS = os.environ['ALERTS']
ANALYTICS_KEY_NAME = os.environ['ANALYTICS_KEY_NAME']
FROM_EMAIL = os.environ['FROM_EMAIL']
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOG_FILE = os.environ.get('LOG_FILE', BASE_DIR + '/rightnowalerts.log')
# ... rest of the code ...
|
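The required settings use `os.environ[...]`, which raises `KeyError` when unset, while the new `LOG_FILE` uses `.get()` with a computed fallback. A tiny self-contained illustration of that fallback path (single `dirname` here for brevity; the record climbs one directory higher):

import os

os.environ.pop("LOG_FILE", None)                      # pretend the variable is unset
base_dir = os.path.dirname(os.path.abspath(__file__))
log_file = os.environ.get("LOG_FILE", os.path.join(base_dir, "rightnowalerts.log"))
print(log_file)                                       # <this directory>/rightnowalerts.log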
13d5ba1e887e0c9520c7f30b08a740fc7529427c
|
src/main/java/com/wyverngame/rmi/api/method/GetBattleRanksMethod.java
|
src/main/java/com/wyverngame/rmi/api/method/GetBattleRanksMethod.java
|
package com.wyverngame.rmi.api.method;
import java.rmi.RemoteException;
import java.util.Map;
import com.wyverngame.rmi.api.RMIClient;
import com.wyverngame.rmi.api.http.Request;
import com.wyverngame.rmi.api.response.BattleRanksResponse;
public final class GetBattleRanksMethod extends Method<BattleRanksResponse> {
@Override
public BattleRanksResponse process(RMIClient client, Request request) throws RemoteException {
int limit = Integer.parseInt(request.getOrDefault("limit", Integer.toString(Integer.MAX_VALUE)));
Map<String, Integer> ranks = client.getWebInterface().getBattleRanks(limit);
return new BattleRanksResponse(ranks);
}
}
|
package com.wyverngame.rmi.api.method;
import java.rmi.RemoteException;
import java.util.Map;
import com.wyverngame.rmi.api.RMIClient;
import com.wyverngame.rmi.api.http.Request;
import com.wyverngame.rmi.api.response.BattleRanksResponse;
public final class GetBattleRanksMethod extends Method<BattleRanksResponse> {
@Override
public BattleRanksResponse process(RMIClient client, Request request) throws RemoteException {
int limit = Integer.parseInt(request.getOrDefault("limit", Integer.toString(10)));
Map<String, Integer> ranks = client.getWebInterface().getBattleRanks(limit);
return new BattleRanksResponse(ranks);
}
}
|
Update battle ranks cap to 10.
|
Update battle ranks cap to 10.
|
Java
|
mit
|
jonathanedgecombe/wu-json-rmi-api
|
java
|
## Code Before:
package com.wyverngame.rmi.api.method;
import java.rmi.RemoteException;
import java.util.Map;
import com.wyverngame.rmi.api.RMIClient;
import com.wyverngame.rmi.api.http.Request;
import com.wyverngame.rmi.api.response.BattleRanksResponse;
public final class GetBattleRanksMethod extends Method<BattleRanksResponse> {
@Override
public BattleRanksResponse process(RMIClient client, Request request) throws RemoteException {
int limit = Integer.parseInt(request.getOrDefault("limit", Integer.toString(Integer.MAX_VALUE)));
Map<String, Integer> ranks = client.getWebInterface().getBattleRanks(limit);
return new BattleRanksResponse(ranks);
}
}
## Instruction:
Update battle ranks cap to 10.
## Code After:
package com.wyverngame.rmi.api.method;
import java.rmi.RemoteException;
import java.util.Map;
import com.wyverngame.rmi.api.RMIClient;
import com.wyverngame.rmi.api.http.Request;
import com.wyverngame.rmi.api.response.BattleRanksResponse;
public final class GetBattleRanksMethod extends Method<BattleRanksResponse> {
@Override
public BattleRanksResponse process(RMIClient client, Request request) throws RemoteException {
int limit = Integer.parseInt(request.getOrDefault("limit", Integer.toString(10)));
Map<String, Integer> ranks = client.getWebInterface().getBattleRanks(limit);
return new BattleRanksResponse(ranks);
}
}
|
...
public final class GetBattleRanksMethod extends Method<BattleRanksResponse> {
@Override
public BattleRanksResponse process(RMIClient client, Request request) throws RemoteException {
int limit = Integer.parseInt(request.getOrDefault("limit", Integer.toString(10)));
Map<String, Integer> ranks = client.getWebInterface().getBattleRanks(limit);
return new BattleRanksResponse(ranks);
}
...
|
3d8d82be3528cc0150dac0c8ade1f6c306b412e4
|
channels/apps.py
|
channels/apps.py
|
from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
Add early import to fix problems with other packages and Twisted.
|
Add early import to fix problems with other packages and Twisted.
|
Python
|
bsd-3-clause
|
andrewgodwin/channels,andrewgodwin/django-channels,django/channels
|
python
|
## Code Before:
from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
## Instruction:
Add early import to fix problems with other packages and Twisted.
## Code After:
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
// ... existing code ...
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
// ... rest of the code ...
|
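The comment guards against a Twisted side effect: the first `import twisted.internet.reactor` installs the default reactor, and any later attempt to install a different one raises, so the early import makes sure the reactor setup channels/daphne expects happens before packages like raven touch Twisted. A sketch of that failure mode, assuming a Python 3 Twisted install (not part of channels itself):

import twisted.internet.reactor                  # noqa: F401  (side effect: default reactor installed)
from twisted.internet import asyncioreactor

try:
    asyncioreactor.install()                     # too late: a reactor already exists
except Exception as exc:
    print(type(exc).__name__)                    # typically ReactorAlreadyInstalledError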
b02d7e1e288eeaf38cfc299765f4c940bad5ea36
|
examples/add_misc_features.py
|
examples/add_misc_features.py
|
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
|
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
|
Update example with correct form, and with comment.
|
Update example with correct form, and with comment.
|
Python
|
mit
|
pyconll/pyconll,pyconll/pyconll
|
python
|
## Code Before:
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
## Instruction:
Update example with correct form, and with comment.
## Code After:
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
|
...
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
...
|
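A hedged round-trip of the singleton behaviour the new comment describes, assuming pyconll also exposes `load_from_string` alongside `load_from_file` and that `token.misc` is dict-like as the example uses it; the two-token sentence is invented:

import pyconll

conllu = (
    "1\tdogs\tdog\tNOUN\t_\t_\t0\troot\t_\t_\n"
    "2\tdog\tdog\tVERB\t_\t_\t1\tparataxis\t_\t_\n"
    "\n"
)
sentence = next(iter(pyconll.load_from_string(conllu)))
for token in sentence:
    if token.lemma == "dog" and token.upos == "VERB":
        token.misc["Polysemous"] = None      # singleton: MISC gains a bare "Polysemous"
        print("Polysemous" in token.misc)    # True
        del token.misc["Polysemous"]         # removes it again
        print("Polysemous" in token.misc)    # False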
77fd12a850fbca0b3308e964e457f234d12d7c11
|
src/wad.blog/wad/blog/utils.py
|
src/wad.blog/wad/blog/utils.py
|
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
|
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
|
Fix portlet assignment context utility
|
Fix portlet assignment context utility
|
Python
|
mit
|
potzenheimer/buildout.wad,potzenheimer/buildout.wad
|
python
|
## Code Before:
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = context.aq_inner
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager, manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is assignment.aq_base:
return context
except ComponentLookupError:
pass
parent = context.aq_parent
if parent is context:
return None
context = parent
## Instruction:
Fix portlet assignment context utility
## Code After:
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
|
# ... existing code ...
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import aq_base
from plone.portlets.interfaces import IPortletAssignmentMapping
from plone.portlets.interfaces import IPortletManager
from zope.component import getUtility, getMultiAdapter, ComponentLookupError
def find_assignment_context(assignment, context):
# Finds the creation context of the assignment
context = aq_inner(context)
manager_name = assignment.manager.__name__
assignment_name = assignment.__name__
while True:
try:
manager = getUtility(IPortletManager,
manager_name,
context=context)
mapping = getMultiAdapter((context, manager),
IPortletAssignmentMapping)
if assignment_name in mapping:
if mapping[assignment_name] is aq_base(assignment):
return context
except ComponentLookupError:
pass
parent = aq_parent(context)
if parent is context:
return None
context = parent
# ... rest of the code ...
|
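The heart of `find_assignment_context` is a walk up the acquisition chain that stops either when the assignment is found or when the chain tops out. A framework-free sketch of that loop shape, with plain objects standing in for Plone content rather than the Zope acquisition machinery:

class Node:
    def __init__(self, name, parent=None, portlets=()):
        self.name, self.parent, self.portlets = name, parent, set(portlets)

def find_context(node, assignment):
    while True:
        if assignment in node.portlets:
            return node
        parent = node.parent if node.parent is not None else node
        if parent is node:            # reached the root: give up
            return None
        node = parent

root = Node("site", portlets={"news"})
folder = Node("folder", parent=root)
page = Node("page", parent=folder)
print(find_context(page, "news").name)    # site
print(find_context(page, "missing"))      # None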
e4b47c9bc3de18c83a2fb718c806b7668b492de6
|
authentication/urls.py
|
authentication/urls.py
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login'))
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout'))
|
Add name to logout url regex
|
Add name to logout url regex
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
python
|
## Code Before:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login'))
## Instruction:
Add name to logout url regex
## Code After:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout'))
|
# ... existing code ...
urlpatterns = patterns('django.contrib.auth.views',
url(r'^login/$', 'login', {'template_name': 'authentication/login.html'}),
url(r'^logout/$', 'logout_then_login',name='logout'))
# ... rest of the code ...
|
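Naming the pattern is what makes it reversible: templates can use `{% url 'logout' %}` and Python code `reverse('logout')`. A self-contained sketch against current Django URL APIs (`path`/`reverse`) rather than the old `patterns()` syntax shown in the record:

import django
from django.conf import settings
from django.http import HttpResponse
from django.urls import path, reverse

def logout_view(request):
    return HttpResponse("logged out")

urlpatterns = [path("logout/", logout_view, name="logout")]

settings.configure(ROOT_URLCONF=__name__)   # single-file setup, for illustration
django.setup()
print(reverse("logout"))                    # /logout/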
5cf17b6a46a3d4bbf4cecb65e4b9ef43066869d9
|
feincms/templatetags/applicationcontent_tags.py
|
feincms/templatetags/applicationcontent_tags.py
|
from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
getattr(page.content, region) if isinstance(content, ApplicationContent))
|
from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
page.content.all_of_type(ApplicationContent) if content.region == region)
|
Use all_of_type instead of isinstance check in feincms_render_region_appcontent
|
Use all_of_type instead of isinstance check in feincms_render_region_appcontent
|
Python
|
bsd-3-clause
|
feincms/feincms,joshuajonah/feincms,feincms/feincms,matthiask/feincms2-content,matthiask/django-content-editor,michaelkuty/feincms,mjl/feincms,matthiask/feincms2-content,mjl/feincms,matthiask/django-content-editor,matthiask/django-content-editor,michaelkuty/feincms,nickburlett/feincms,matthiask/django-content-editor,joshuajonah/feincms,matthiask/feincms2-content,nickburlett/feincms,pjdelport/feincms,pjdelport/feincms,joshuajonah/feincms,pjdelport/feincms,michaelkuty/feincms,michaelkuty/feincms,nickburlett/feincms,feincms/feincms,nickburlett/feincms,joshuajonah/feincms,mjl/feincms
|
python
|
## Code Before:
from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
getattr(page.content, region) if isinstance(content, ApplicationContent))
## Instruction:
Use all_of_type instead of isinstance check in feincms_render_region_appcontent
## Code After:
from django import template
# backwards compatibility import
from feincms.templatetags.fragment_tags import fragment, get_fragment, has_fragment
register = template.Library()
register.tag(fragment)
register.tag(get_fragment)
register.filter(has_fragment)
@register.simple_tag
def feincms_render_region_appcontent(page, region, request):
"""Render only the application content for the region
This allows template authors to choose whether their page behaves
differently when displaying embedded application subpages by doing
something like this::
{% if not in_appcontent_subpage %}
{% feincms_render_region feincms_page "main" request %}
{% else %}
{% feincms_render_region_appcontent feincms_page "main" request %}
{% endif %}
"""
from feincms.content.application.models import ApplicationContent
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
page.content.all_of_type(ApplicationContent) if content.region == region)
|
...
from feincms.templatetags.feincms_tags import _render_content
return u''.join(_render_content(content, request=request) for content in\
page.content.all_of_type(ApplicationContent) if content.region == region)
...
|
84929e01bfb9236fd0f51d82ee514d513d018408
|
triangle/triangle.py
|
triangle/triangle.py
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
|
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
Sort dimensions to reduce code
|
Sort dimensions to reduce code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
python
|
## Code Before:
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = dims
def kind(self):
a, b, c = self.dims
if a == b and b == c:
return "equilateral"
elif a == b or b == c or a == c:
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
return False
a, b, c = dims
return (a > 0 and b > 0 and c > 0) \
and (a + b > c and a + c > b and b + c > a)
## Instruction:
Sort dimensions to reduce code
## Code After:
class TriangleError(Exception):
pass
class Triangle(object):
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
|
...
def __init__(self, *dims):
if not self.is_valid(dims):
raise TriangleError("Invalid dimensions: {}".format(dims))
self.dims = sorted(dims)
def kind(self):
a, b, c = self.dims
if a == b and b == c: # implies a == c
return "equilateral"
elif a == b or b == c: # sorted, so a < c here unless a == c above
return "isosceles"
else:
return "scalene"
...
@staticmethod
def is_valid(dims):
if len(dims) != 3:
raise ValueError("Triangles have 3 sides")
a, b, c = sorted(dims)
return a > 0 and a + b > c
...
|
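Why sorting lets one comparison stand in for all three triangle inequalities: after sorting, a <= b <= c, so once a > 0 and a + b > c hold, the remaining sums a + c and b + c exceed their opposite sides automatically. A quick check of the sorted-form predicate:

def is_valid(dims):
    a, b, c = sorted(dims)
    return a > 0 and a + b > c

print(is_valid((3, 4, 5)))    # True  (scalene)
print(is_valid((2, 2, 2)))    # True  (equilateral)
print(is_valid((1, 1, 3)))    # False (1 + 1 <= 3)
print(is_valid((0, 4, 4)))    # False (non-positive side)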
0aa757955d631df9fb8e6cbe3e372dcae56e2255
|
django_mailbox/transports/imap.py
|
django_mailbox/transports/imap.py
|
from imaplib import IMAP4, IMAP4_SSL
from .base import EmailTransport, MessageParseError
class ImapTransport(EmailTransport):
def __init__(self, hostname, port=None, ssl=False, archive=''):
self.hostname = hostname
self.port = port
self.archive = archive
if ssl:
self.transport = IMAP4_SSL
if not self.port:
self.port = 993
else:
self.transport = IMAP4
if not self.port:
self.port = 143
def connect(self, username, password):
self.server = self.transport(self.hostname, self.port)
typ, msg = self.server.login(username, password)
self.server.select()
def get_message(self):
typ, inbox = self.server.search(None, 'ALL')
if not inbox[0]:
return
if self.archive:
typ, folders = self.server.list(pattern=self.archive)
if folders[0] == None:
self.archive = False
for key in inbox[0].split():
try:
typ, msg_contents = self.server.fetch(key, '(RFC822)')
message = self.get_email_from_bytes(msg_contents[0][1])
yield message
except MessageParseError:
continue
if self.archive:
self.server.copy(key, self.archive)
self.server.store(key, "+FLAGS", "\\Deleted")
self.server.expunge()
return
|
from imaplib import IMAP4, IMAP4_SSL
from .base import EmailTransport, MessageParseError
class ImapTransport(EmailTransport):
def __init__(self, hostname, port=None, ssl=False, archive=''):
self.hostname = hostname
self.port = port
self.archive = archive
if ssl:
self.transport = IMAP4_SSL
if not self.port:
self.port = 993
else:
self.transport = IMAP4
if not self.port:
self.port = 143
def connect(self, username, password):
self.server = self.transport(self.hostname, self.port)
typ, msg = self.server.login(username, password)
self.server.select()
def get_message(self):
typ, inbox = self.server.search(None, 'ALL')
if not inbox[0]:
return
if self.archive:
typ, folders = self.server.list(pattern=self.archive)
if folders[0] is None:
# If the archive folder does not exist, create it
self.server.create(self.archive)
for key in inbox[0].split():
try:
typ, msg_contents = self.server.fetch(key, '(RFC822)')
message = self.get_email_from_bytes(msg_contents[0][1])
yield message
except MessageParseError:
continue
if self.archive:
self.server.copy(key, self.archive)
self.server.store(key, "+FLAGS", "\\Deleted")
self.server.expunge()
return
|
Create archive folder if it does not exist.
|
Create archive folder if it does not exist.
|
Python
|
mit
|
coddingtonbear/django-mailbox,ad-m/django-mailbox,Shekharrajak/django-mailbox,leifurhauks/django-mailbox
|
python
|
## Code Before:
from imaplib import IMAP4, IMAP4_SSL
from .base import EmailTransport, MessageParseError
class ImapTransport(EmailTransport):
def __init__(self, hostname, port=None, ssl=False, archive=''):
self.hostname = hostname
self.port = port
self.archive = archive
if ssl:
self.transport = IMAP4_SSL
if not self.port:
self.port = 993
else:
self.transport = IMAP4
if not self.port:
self.port = 143
def connect(self, username, password):
self.server = self.transport(self.hostname, self.port)
typ, msg = self.server.login(username, password)
self.server.select()
def get_message(self):
typ, inbox = self.server.search(None, 'ALL')
if not inbox[0]:
return
if self.archive:
typ, folders = self.server.list(pattern=self.archive)
if folders[0] == None:
self.archive = False
for key in inbox[0].split():
try:
typ, msg_contents = self.server.fetch(key, '(RFC822)')
message = self.get_email_from_bytes(msg_contents[0][1])
yield message
except MessageParseError:
continue
if self.archive:
self.server.copy(key, self.archive)
self.server.store(key, "+FLAGS", "\\Deleted")
self.server.expunge()
return
## Instruction:
Create archive folder if it does not exist.
## Code After:
from imaplib import IMAP4, IMAP4_SSL
from .base import EmailTransport, MessageParseError
class ImapTransport(EmailTransport):
def __init__(self, hostname, port=None, ssl=False, archive=''):
self.hostname = hostname
self.port = port
self.archive = archive
if ssl:
self.transport = IMAP4_SSL
if not self.port:
self.port = 993
else:
self.transport = IMAP4
if not self.port:
self.port = 143
def connect(self, username, password):
self.server = self.transport(self.hostname, self.port)
typ, msg = self.server.login(username, password)
self.server.select()
def get_message(self):
typ, inbox = self.server.search(None, 'ALL')
if not inbox[0]:
return
if self.archive:
typ, folders = self.server.list(pattern=self.archive)
if folders[0] is None:
# If the archive folder does not exist, create it
self.server.create(self.archive)
for key in inbox[0].split():
try:
typ, msg_contents = self.server.fetch(key, '(RFC822)')
message = self.get_email_from_bytes(msg_contents[0][1])
yield message
except MessageParseError:
continue
if self.archive:
self.server.copy(key, self.archive)
self.server.store(key, "+FLAGS", "\\Deleted")
self.server.expunge()
return
|
// ... existing code ...
if self.archive:
typ, folders = self.server.list(pattern=self.archive)
if folders[0] is None:
# If the archive folder does not exist, create it
self.server.create(self.archive)
for key in inbox[0].split():
try:
// ... rest of the code ...
|
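A minimal usage sketch for the ImapTransport shown above: it connects, drains the inbox, and relies on the archive folder being created on first use. The import path, hostname, credentials and folder name are placeholder assumptions, not values from the repository.

from django_mailbox.transports.imap import ImapTransport  # import path assumed

# Placeholder server and credentials; the 'Archived' folder is created if missing.
transport = ImapTransport('imap.example.com', ssl=True, archive='Archived')
transport.connect('user@example.com', 'app-password')
for message in transport.get_message():
    # Each yielded item is assumed to be a parsed email.message.Message; once the
    # generator resumes, the original is copied to 'Archived' and flagged deleted.
    print(message['subject'])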
531974ce5d621b903608aa226110277f77918167
|
tools/reset_gids.py
|
tools/reset_gids.py
|
import types
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
from sfa.trust.hierarchy import Hierarchy
from sfa.util.xrn import Xrn
from sfa.trust.certificate import Certificate, Keypair, convert_public_key
def fix_users():
s=global_dbsession
hierarchy = Hierarchy()
users = s.query(RegRecord).filter_by(type="user")
for record in users:
record.gid = ""
if not record.gid:
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
if pub_key is not None:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
email=getattr(record,'email',None)
gid_object = hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
s.commit()
if __name__ == '__main__':
fix_users()
|
import types
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
from sfa.trust.hierarchy import Hierarchy
from sfa.util.xrn import Xrn
from sfa.trust.certificate import Certificate, Keypair, convert_public_key
def fix_users():
s=global_dbsession
hierarchy = Hierarchy()
users = s.query(RegRecord).filter_by(type="user")
for record in users:
record.gid = ""
if not record.gid:
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
print pub_key
if len(pub_key) > 0:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
email=getattr(record,'email',None)
gid_object = hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
s.commit()
if __name__ == '__main__':
fix_users()
|
Reset GIDs works even if user has no pub_key
|
Fix: Reset GIDs works even if user has no pub_key
|
Python
|
mit
|
yippeecw/sfa,onelab-eu/sfa,onelab-eu/sfa,yippeecw/sfa,onelab-eu/sfa,yippeecw/sfa
|
python
|
## Code Before:
import types
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
from sfa.trust.hierarchy import Hierarchy
from sfa.util.xrn import Xrn
from sfa.trust.certificate import Certificate, Keypair, convert_public_key
def fix_users():
s=global_dbsession
hierarchy = Hierarchy()
users = s.query(RegRecord).filter_by(type="user")
for record in users:
record.gid = ""
if not record.gid:
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
if pub_key is not None:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
email=getattr(record,'email',None)
gid_object = hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
s.commit()
if __name__ == '__main__':
fix_users()
## Instruction:
Fix: Reset GIDs works even if user has no pub_key
## Code After:
import types
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
from sfa.trust.hierarchy import Hierarchy
from sfa.util.xrn import Xrn
from sfa.trust.certificate import Certificate, Keypair, convert_public_key
def fix_users():
s=global_dbsession
hierarchy = Hierarchy()
users = s.query(RegRecord).filter_by(type="user")
for record in users:
record.gid = ""
if not record.gid:
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
print pub_key
if len(pub_key) > 0:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
email=getattr(record,'email',None)
gid_object = hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
s.commit()
if __name__ == '__main__':
fix_users()
|
...
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
print pub_key
if len(pub_key) > 0:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
...
|
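A standalone sketch of the key-selection guard used above, assuming reg_keys behaves like the list of key objects in the script; it returns the first stored public key string, or None when the user has no keys, which is the case the fix targets. The helper name is illustrative.

def first_public_key(reg_keys):
    """Return the first stored public key string, or None if there is none."""
    if not reg_keys:  # covers None as well as an empty list
        return None
    key = reg_keys[0] if isinstance(reg_keys, list) else reg_keys
    return getattr(key, 'key', key)

# usage sketch: fall back to a freshly generated keypair when no key is stored
# pub_key = first_public_key(getattr(record, 'reg_keys', None))
# pkey = convert_public_key(pub_key) if pub_key else Keypair(create=True)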
c34817c2740e860493692b630a11fdb7acab76aa
|
tests/test_simple_features.py
|
tests/test_simple_features.py
|
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(POINTS))
features = wordgraph.describe(datapoints)
assert "" in features
|
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
|
Test case for monotonically decreasing graphs
|
Test case for monotonically decreasing graphs
Generate time series data for values that decrease monotonically over
time.
|
Python
|
apache-2.0
|
tleeuwenburg/wordgraph,tleeuwenburg/wordgraph
|
python
|
## Code Before:
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(POINTS))
features = wordgraph.describe(datapoints)
assert "" in features
## Instruction:
Test case for monotonically decreasing graphs
Generate time series data for values that decrease monotonically over
time.
## Code After:
from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
|
# ... existing code ...
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
# ... rest of the code ...
|
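A quick plain-Python check of the two generator expressions used in the tests above, confirming that one series rises and the other falls before they are wrapped into Point objects.

increasing = [float(i) for i in range(10)]   # 0.0 .. 9.0
decreasing = [10.0 - i for i in range(10)]   # 10.0 .. 1.0

assert all(later > earlier for earlier, later in zip(increasing, increasing[1:]))
assert all(later < earlier for earlier, later in zip(decreasing, decreasing[1:]))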
59414b372770f32d94689711acde04b7c7d75fea
|
ReactiveCocoaFramework/ReactiveCocoa/RACDelegateProxy.h
|
ReactiveCocoaFramework/ReactiveCocoa/RACDelegateProxy.h
|
//
// RACDelegateProxy.h
// ReactiveCocoa
//
// Created by Cody Krieger on 5/19/12.
// Copyright (c) 2012 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
// A delegate object suitable for using -rac_signalForSelector:fromProtocol:
// upon.
@interface RACDelegateProxy : NSObject
// The delegate to which messages should be forwarded if not handled by
// any -rac_signalForSelector:fromProtocol: applications.
@property (nonatomic, weak) id rac_proxiedDelegate;
// Creates a delegate proxy which will respond to selectors from `protocol`.
- (instancetype)initWithProtocol:(Protocol *)protocol;
@end
|
//
// RACDelegateProxy.h
// ReactiveCocoa
//
// Created by Cody Krieger on 5/19/12.
// Copyright (c) 2012 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
// A delegate object suitable for using -rac_signalForSelector:fromProtocol:
// upon.
@interface RACDelegateProxy : NSObject
// The delegate to which messages should be forwarded if not handled by
// any -rac_signalForSelector:fromProtocol: applications.
@property (nonatomic, unsafe_unretained) id rac_proxiedDelegate;
// Creates a delegate proxy which will respond to selectors from `protocol`.
- (instancetype)initWithProtocol:(Protocol *)protocol;
@end
|
Use unsafe_unretained for delegates, to support OS X
|
Use unsafe_unretained for delegates, to support OS X
|
C
|
mit
|
zhigang1992/ReactiveCocoa,nickcheng/ReactiveCocoa,andersio/ReactiveCocoa,ShawnLeee/ReactiveCocoa,chieryw/ReactiveCocoa,msdgwzhy6/ReactiveCocoa,j364960953/ReactiveCocoa,zhukaixy/ReactiveCocoa,WEIBP/ReactiveCocoa,longv2go/ReactiveCocoa,Ray0218/ReactiveCocoa,mtxs007/ReactiveCocoa,zzqiltw/ReactiveCocoa,wpstarnice/ReactiveCocoa,taylormoonxu/ReactiveCocoa,natan/ReactiveCocoa,hj3938/ReactiveCocoa,taylormoonxu/ReactiveCocoa,ceekayel/ReactiveCocoa,eyu1988/ReactiveCocoa,Juraldinio/ReactiveCocoa,sujeking/ReactiveCocoa,leichunfeng/ReactiveCocoa,jrmiddle/ReactiveCocoa,KuPai32G/ReactiveCocoa,fanghao085/ReactiveCocoa,itschaitanya/ReactiveCocoa,takeshineshiro/ReactiveCocoa,Farteen/ReactiveCocoa,walkingsmarts/ReactiveCocoa,terry408911/ReactiveCocoa,Rupert-RR/ReactiveCocoa,buildo/ReactiveCocoa,kiurentu/ReactiveCocoa,isghe/ReactiveCocoa,beni55/ReactiveCocoa,yonekawa/ReactiveCocoa,ikesyo/ReactiveCocoa,tipbit/ReactiveCocoa,BrooksWon/ReactiveCocoa,j364960953/ReactiveCocoa,cstars135/ReactiveCocoa,libiao88/ReactiveCocoa,yizzuide/ReactiveCocoa,dachaoisme/ReactiveCocoa,WEIBP/ReactiveCocoa,jsslai/ReactiveCocoa,bscarano/ReactiveCocoa,isghe/ReactiveCocoa,loupman/ReactiveCocoa,shuxiashusheng/ReactiveCocoa,ReactiveCocoa/ReactiveSwift,monkeydbobo/ReactiveCocoa,FelixYin66/ReactiveCocoa,DreamHill/ReactiveCocoa,beni55/ReactiveCocoa,FelixYin66/ReactiveCocoa,JohnJin007/ReactiveCocoa,335g/ReactiveCocoa,calebd/ReactiveCocoa,rpowelll/ReactiveCocoa,LHDsimon/ReactiveCocoa,alvinvarghese/ReactiveCocoa,howandhao/ReactiveCocoa,calebd/ReactiveCocoa,emodeqidao/ReactiveCocoa,esttorhe/ReactiveCocoa,fhchina/ReactiveCocoa,gabemdev/ReactiveCocoa,cnbin/ReactiveCocoa,JohnJin007/ReactiveCocoa,on99/ReactiveCocoa,Juraldinio/ReactiveCocoa,paulyoung/ReactiveCocoa,dz1111/ReactiveCocoa,ReactiveCocoa/ReactiveSwift,mattpetters/ReactiveCocoa,ohwutup/ReactiveCocoa,SmartEncounter/ReactiveCocoa,WEIBP/ReactiveCocoa,brightcove/ReactiveCocoa,yangshengchaoios/ReactiveCocoa,mxxiv/ReactiveCocoa,yytong/ReactiveCocoa,Khan/ReactiveCocoa,Pingco/ReactiveCocoa,loupman/ReactiveCocoa,shuxiashusheng/ReactiveCocoa,jsslai/ReactiveCocoa,almassapargali/ReactiveCocoa,andersio/ReactiveCocoa,Farteen/ReactiveCocoa,wpstarnice/ReactiveCocoa,SuPair/ReactiveCocoa,hbucius/ReactiveCocoa,wpstarnice/ReactiveCocoa,towik/ReactiveCocoa,nickcheng/ReactiveCocoa,200895045/ReactiveCocoa,add715/ReactiveCocoa,imkerberos/ReactiveCocoa,DongDongDongDong/ReactiveCocoa,cogddo/ReactiveCocoa,KJin99/ReactiveCocoa,CQXfly/ReactiveCocoa,jsslai/ReactiveCocoa,jaylib/ReactiveCocoa,valleyman86/ReactiveCocoa,ericzhou2008/ReactiveCocoa,brightcove/ReactiveCocoa,Eveian/ReactiveCocoa,icepy/ReactiveCocoa,AllanChen/ReactiveCocoa,nickcheng/ReactiveCocoa,vincentiss/ReactiveCocoa,chieryw/ReactiveCocoa,goodheart/ReactiveCocoa,gengjf/ReactiveCocoa,vincentiss/ReactiveCocoa,zxq3220122/ReactiveCocoa,Eveian/ReactiveCocoa,sdhzwm/ReactiveCocoa,ReactiveCocoa/ReactiveSwift,xiaoliyang/ReactiveCocoa,ddc391565320/ReactiveCocoa,yangyangluoluo/ReactiveCocoa,huiping192/ReactiveCocoa,Ricowere/ReactiveCocoa,hbucius/ReactiveCocoa,PSPDFKit-labs/ReactiveCocoa,Carthage/ReactiveCocoa,victorlin/ReactiveCocoa,tzongw/ReactiveCocoa,qq644531343/ReactiveCocoa,Carthage/ReactiveCocoa,SuPair/ReactiveCocoa,koamac/ReactiveCocoa,cogddo/ReactiveCocoa,JackLian/ReactiveCocoa,jianwoo/ReactiveCocoa,juliangrosshauser/ReactiveCocoa,tonyarnold/ReactiveCocoa,ailyanlu/ReactiveCocoa,jackywpy/ReactiveCocoa,CQXfly/ReactiveCocoa,paulyoung/ReactiveCocoa,stupidfive/ReactiveCocoa,RuiAAPeres/ReactiveCocoa,liufeigit/ReactiveCocoa,OnTheWay1988/ReactiveCocoa,jeelun/ReactiveCo
coa,wangqi211/ReactiveCocoa,stupidfive/ReactiveCocoa,j364960953/ReactiveCocoa,yizzuide/ReactiveCocoa,jam891/ReactiveCocoa,swizzlr/ReactiveCocoa,Adlai-Holler/ReactiveCocoa,Ethan89/ReactiveCocoa,Liquidsoul/ReactiveCocoa,on99/ReactiveCocoa,huiping192/ReactiveCocoa,tzongw/ReactiveCocoa,tonyli508/ReactiveCocoa,itschaitanya/ReactiveCocoa,takeshineshiro/ReactiveCocoa,bensonday/ReactiveCocoa,ioshger0125/ReactiveCocoa,chieryw/ReactiveCocoa,KuPai32G/ReactiveCocoa,ioshger0125/ReactiveCocoa,jrmiddle/ReactiveCocoa,sandyway/ReactiveCocoa,PSPDFKit-labs/ReactiveCocoa,wangqi211/ReactiveCocoa,zzzworm/ReactiveCocoa,zhiwen1024/ReactiveCocoa,isghe/ReactiveCocoa,natestedman/ReactiveCocoa,yaoxiaoyong/ReactiveCocoa,chao95957/ReactiveCocoa,nikita-leonov/ReactiveCocoa,nikita-leonov/ReactiveCocoa,zxq3220122/ReactiveCocoa,jackywpy/ReactiveCocoa,CCOOOOLL/ReactiveCocoa,taylormoonxu/ReactiveCocoa,yizzuide/ReactiveCocoa,yangyangluoluo/ReactiveCocoa,llb1119/test,brasbug/ReactiveCocoa,nikita-leonov/ReactiveCocoa,yonekawa/ReactiveCocoa,victorlin/ReactiveCocoa,esttorhe/ReactiveCocoa,hbucius/ReactiveCocoa,brasbug/ReactiveCocoa,zhukaixy/ReactiveCocoa,juliangrosshauser/ReactiveCocoa,CQXfly/ReactiveCocoa,SanChain/ReactiveCocoa,KuPai32G/ReactiveCocoa,cogddo/ReactiveCocoa,richeterre/ReactiveCocoa,dskatz22/ReactiveCocoa,Ricowere/ReactiveCocoa,GuitarPlayer-Ma/ReactiveCocoa,ericzhou2008/ReactiveCocoa,Liquidsoul/ReactiveCocoa,zhiwen1024/ReactiveCocoa,lixar/ReactiveCocoa,AndyZhaoHe/ReactiveCocoa,zhaoguohui/ReactiveCocoa,jianwoo/ReactiveCocoa,jeelun/ReactiveCocoa,cnbin/ReactiveCocoa,goodheart/ReactiveCocoa,tiger8888/ReactiveCocoa,dskatz22/ReactiveCocoa,qq644531343/ReactiveCocoa,zhukaixy/ReactiveCocoa,kaylio/ReactiveCocoa,shaohung001/ReactiveCocoa,zhigang1992/ReactiveCocoa,jam891/ReactiveCocoa,dz1111/ReactiveCocoa,walkingsmarts/ReactiveCocoa,Eveian/ReactiveCocoa,dskatz22/ReactiveCocoa,Ricowere/ReactiveCocoa,Pingco/ReactiveCocoa,almassapargali/ReactiveCocoa,Liquidsoul/ReactiveCocoa,smilypeda/ReactiveCocoa,ztchena/ReactiveCocoa,qq644531343/ReactiveCocoa,zhenlove/ReactiveCocoa,windgo/ReactiveCocoa,valleyman86/ReactiveCocoa,richeterre/ReactiveCocoa,alvinvarghese/ReactiveCocoa,clg0118/ReactiveCocoa,emodeqidao/ReactiveCocoa,mattpetters/ReactiveCocoa,xiaobing2007/ReactiveCocoa,cstars135/ReactiveCocoa,eliperkins/ReactiveCocoa,GuitarPlayer-Ma/ReactiveCocoa,tonyarnold/ReactiveCocoa,vincentiss/ReactiveCocoa,mattpetters/ReactiveCocoa,kiurentu/ReactiveCocoa,terry408911/ReactiveCocoa,yoichitgy/ReactiveCocoa,shuxiashusheng/ReactiveCocoa,shaohung001/ReactiveCocoa,brightcove/ReactiveCocoa,CCOOOOLL/ReactiveCocoa,PSPDFKit-labs/ReactiveCocoa,kevin-zqw/ReactiveCocoa,esttorhe/ReactiveCocoa,Juraldinio/ReactiveCocoa,buildo/ReactiveCocoa,Ray0218/ReactiveCocoa,SmartEncounter/ReactiveCocoa,longv2go/ReactiveCocoa,RuiAAPeres/ReactiveCocoa,luerhouhou/ReactiveCocoa,335g/ReactiveCocoa,koamac/ReactiveCocoa,natestedman/ReactiveCocoa,msdgwzhy6/ReactiveCocoa,DongDongDongDong/ReactiveCocoa,Adlai-Holler/ReactiveCocoa,fhchina/ReactiveCocoa,yangyangluoluo/ReactiveCocoa,jrmiddle/ReactiveCocoa,dachaoisme/ReactiveCocoa,SanChain/ReactiveCocoa,AlanJN/ReactiveCocoa,libiao88/ReactiveCocoa,hj3938/ReactiveCocoa,goodheart/ReactiveCocoa,fhchina/ReactiveCocoa,xiaoliyang/ReactiveCocoa,on99/ReactiveCocoa,335g/ReactiveCocoa,pzw224/ReactiveCocoa,dullgrass/ReactiveCocoa,liufeigit/ReactiveCocoa,kaylio/ReactiveCocoa,xiaoliyang/ReactiveCocoa,Khan/ReactiveCocoa,swizzlr/ReactiveCocoa,Rupert-RR/ReactiveCocoa,cnbin/ReactiveCocoa,tzongw/ReactiveCocoa,GuitarPlayer-Ma/ReactiveCocoa,lixar/ReactiveCocoa,ho
anganh6491/ReactiveCocoa,loupman/ReactiveCocoa,natestedman/ReactiveCocoa,tornade0913/ReactiveCocoa,shaohung001/ReactiveCocoa,zhenlove/ReactiveCocoa,Khan/ReactiveCocoa,zzzworm/ReactiveCocoa,xumaolin/ReactiveCocoa,takeshineshiro/ReactiveCocoa,Ethan89/ReactiveCocoa,fanghao085/ReactiveCocoa,SuPair/ReactiveCocoa,hilllinux/ReactiveCocoa,add715/ReactiveCocoa,ceekayel/ReactiveCocoa,hilllinux/ReactiveCocoa,stevielu/ReactiveCocoa,Ethan89/ReactiveCocoa,hoanganh6491/ReactiveCocoa,richeterre/ReactiveCocoa,tonyarnold/ReactiveCocoa,OneSmallTree/ReactiveCocoa,yonekawa/ReactiveCocoa,stevielu/ReactiveCocoa,ailyanlu/ReactiveCocoa,tonyli508/ReactiveCocoa,ailyanlu/ReactiveCocoa,eyu1988/ReactiveCocoa,yoichitgy/ReactiveCocoa,jaylib/ReactiveCocoa,smilypeda/ReactiveCocoa,Farteen/ReactiveCocoa,xumaolin/ReactiveCocoa,ztchena/ReactiveCocoa,leichunfeng/ReactiveCocoa,BrooksWon/ReactiveCocoa,itschaitanya/ReactiveCocoa,howandhao/ReactiveCocoa,bscarano/ReactiveCocoa,almassapargali/ReactiveCocoa,sujeking/ReactiveCocoa,eliperkins/ReactiveCocoa,buildo/ReactiveCocoa,jeelun/ReactiveCocoa,ddc391565320/ReactiveCocoa,longv2go/ReactiveCocoa,huiping192/ReactiveCocoa,leelili/ReactiveCocoa,yaoxiaoyong/ReactiveCocoa,dachaoisme/ReactiveCocoa,towik/ReactiveCocoa,liufeigit/ReactiveCocoa,DreamHill/ReactiveCocoa,pzw224/ReactiveCocoa,lixar/ReactiveCocoa,tipbit/ReactiveCocoa,DreamHill/ReactiveCocoa,bencochran/ReactiveCocoa,yoichitgy/ReactiveCocoa,DongDongDongDong/ReactiveCocoa,chao95957/ReactiveCocoa,200895045/ReactiveCocoa,tornade0913/ReactiveCocoa,eliperkins/ReactiveCocoa,ztchena/ReactiveCocoa,OneSmallTree/ReactiveCocoa,xumaolin/ReactiveCocoa,sugar2010/ReactiveCocoa,walkingsmarts/ReactiveCocoa,llb1119/test,AllanChen/ReactiveCocoa,sdhzwm/ReactiveCocoa,tonyli508/ReactiveCocoa,xulibao/ReactiveCocoa,Ricowere/ReactiveCocoa,xulibao/ReactiveCocoa,llb1119/test,jianwoo/ReactiveCocoa,BlessNeo/ReactiveCocoa,KJin99/ReactiveCocoa,bscarano/ReactiveCocoa,nickcheng/ReactiveCocoa,ioshger0125/ReactiveCocoa,gabemdev/ReactiveCocoa,sugar2010/ReactiveCocoa,AndyZhaoHe/ReactiveCocoa,sugar2010/ReactiveCocoa,hilllinux/ReactiveCocoa,bensonday/ReactiveCocoa,OnTheWay1988/ReactiveCocoa,CCOOOOLL/ReactiveCocoa,RuiAAPeres/ReactiveCocoa,emodeqidao/ReactiveCocoa,zhiwen1024/ReactiveCocoa,beni55/ReactiveCocoa,ShawnLeee/ReactiveCocoa,icepy/ReactiveCocoa,luerhouhou/ReactiveCocoa,OneSmallTree/ReactiveCocoa,yaoxiaoyong/ReactiveCocoa,kaylio/ReactiveCocoa,gengjf/ReactiveCocoa,ddc391565320/ReactiveCocoa,sujeking/ReactiveCocoa,SanChain/ReactiveCocoa,leelili/ReactiveCocoa,BlessNeo/ReactiveCocoa,alvinvarghese/ReactiveCocoa,clg0118/ReactiveCocoa,huiping192/ReactiveCocoa,KJin99/ReactiveCocoa,zhenlove/ReactiveCocoa,jaylib/ReactiveCocoa,windgo/ReactiveCocoa,sdhzwm/ReactiveCocoa,ikesyo/ReactiveCocoa,stevielu/ReactiveCocoa,leichunfeng/ReactiveCocoa,mtxs007/ReactiveCocoa,clg0118/ReactiveCocoa,smilypeda/ReactiveCocoa,kevin-zqw/ReactiveCocoa,wangqi211/ReactiveCocoa,mxxiv/ReactiveCocoa,xiaobing2007/ReactiveCocoa,yytong/ReactiveCocoa,windgo/ReactiveCocoa,200895045/ReactiveCocoa,hj3938/ReactiveCocoa,rpowelll/ReactiveCocoa,chao95957/ReactiveCocoa,msdgwzhy6/ReactiveCocoa,kiurentu/ReactiveCocoa,Remitly/ReactiveCocoa,OnTheWay1988/ReactiveCocoa,stupidfive/ReactiveCocoa,zhigang1992/ReactiveCocoa,zzqiltw/ReactiveCocoa,Pingco/ReactiveCocoa,xulibao/ReactiveCocoa,eyu1988/ReactiveCocoa,ohwutup/ReactiveCocoa,rpowelll/ReactiveCocoa,BlessNeo/ReactiveCocoa,Remitly/ReactiveCocoa,zzqiltw/ReactiveCocoa,Remitly/ReactiveCocoa,sandyway/ReactiveCocoa,juliangrosshauser/ReactiveCocoa,calebd/ReactiveCocoa,LHDsimon/Reactive
Cocoa,leelili/ReactiveCocoa,andersio/ReactiveCocoa,terry408911/ReactiveCocoa,Pikdays/ReactiveCocoa,monkeydbobo/ReactiveCocoa,add715/ReactiveCocoa,yytong/ReactiveCocoa,jam891/ReactiveCocoa,Pikdays/ReactiveCocoa,Ray0218/ReactiveCocoa,paulyoung/ReactiveCocoa,FelixYin66/ReactiveCocoa,brasbug/ReactiveCocoa,tiger8888/ReactiveCocoa,bensonday/ReactiveCocoa,monkeydbobo/ReactiveCocoa,jaylib/ReactiveCocoa,JackLian/ReactiveCocoa,icepy/ReactiveCocoa,AlanJN/ReactiveCocoa,jackywpy/ReactiveCocoa,ericzhou2008/ReactiveCocoa,hoanganh6491/ReactiveCocoa,yangshengchaoios/ReactiveCocoa,luerhouhou/ReactiveCocoa,ShawnLeee/ReactiveCocoa,valleyman86/ReactiveCocoa,yangshengchaoios/ReactiveCocoa,ceekayel/ReactiveCocoa,xiaobing2007/ReactiveCocoa,imkerberos/ReactiveCocoa,zhaoguohui/ReactiveCocoa,libiao88/ReactiveCocoa,AndyZhaoHe/ReactiveCocoa,cstars135/ReactiveCocoa,Pikdays/ReactiveCocoa,AlanJN/ReactiveCocoa,natan/ReactiveCocoa,tiger8888/ReactiveCocoa,Adlai-Holler/ReactiveCocoa,towik/ReactiveCocoa,bencochran/ReactiveCocoa,ohwutup/ReactiveCocoa,esttorhe/ReactiveCocoa,gengjf/ReactiveCocoa,howandhao/ReactiveCocoa,koamac/ReactiveCocoa,zhaoguohui/ReactiveCocoa,zxq3220122/ReactiveCocoa,JohnJin007/ReactiveCocoa,Rupert-RR/ReactiveCocoa,LHDsimon/ReactiveCocoa,JackLian/ReactiveCocoa,bencochran/ReactiveCocoa,kevin-zqw/ReactiveCocoa,mtxs007/ReactiveCocoa,tornade0913/ReactiveCocoa,zzzworm/ReactiveCocoa,pzw224/ReactiveCocoa,imkerberos/ReactiveCocoa,dullgrass/ReactiveCocoa,BrooksWon/ReactiveCocoa,victorlin/ReactiveCocoa,dullgrass/ReactiveCocoa,Carthage/ReactiveCocoa,natan/ReactiveCocoa,sandyway/ReactiveCocoa,fanghao085/ReactiveCocoa,dz1111/ReactiveCocoa,mxxiv/ReactiveCocoa
|
c
|
## Code Before:
//
// RACDelegateProxy.h
// ReactiveCocoa
//
// Created by Cody Krieger on 5/19/12.
// Copyright (c) 2012 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
// A delegate object suitable for using -rac_signalForSelector:fromProtocol:
// upon.
@interface RACDelegateProxy : NSObject
// The delegate to which messages should be forwarded if not handled by
// any -rac_signalForSelector:fromProtocol: applications.
@property (nonatomic, weak) id rac_proxiedDelegate;
// Creates a delegate proxy which will respond to selectors from `protocol`.
- (instancetype)initWithProtocol:(Protocol *)protocol;
@end
## Instruction:
Use unsafe_unretained for delegates, to support OS X
## Code After:
//
// RACDelegateProxy.h
// ReactiveCocoa
//
// Created by Cody Krieger on 5/19/12.
// Copyright (c) 2012 GitHub, Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
// A delegate object suitable for using -rac_signalForSelector:fromProtocol:
// upon.
@interface RACDelegateProxy : NSObject
// The delegate to which messages should be forwarded if not handled by
// any -rac_signalForSelector:fromProtocol: applications.
@property (nonatomic, unsafe_unretained) id rac_proxiedDelegate;
// Creates a delegate proxy which will respond to selectors from `protocol`.
- (instancetype)initWithProtocol:(Protocol *)protocol;
@end
|
# ... existing code ...
// The delegate to which messages should be forwarded if not handled by
// any -rac_signalForSelector:fromProtocol: applications.
@property (nonatomic, unsafe_unretained) id rac_proxiedDelegate;
// Creates a delegate proxy which will respond to selectors from `protocol`.
- (instancetype)initWithProtocol:(Protocol *)protocol;
# ... rest of the code ...
|
947470b6ae897dd4a340944206c80e39c5e20cc9
|
browser/atom_browser_main_parts.h
|
browser/atom_browser_main_parts.h
|
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#define ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#include "brightray/browser/browser_main_parts.h"
namespace atom {
class AtomBrowserBindings;
class Browser;
class NodeBindings;
class AtomBrowserMainParts : public brightray::BrowserMainParts {
public:
AtomBrowserMainParts();
virtual ~AtomBrowserMainParts();
static AtomBrowserMainParts* Get();
AtomBrowserBindings* atom_bindings() { return atom_bindings_.get(); }
Browser* browser() { return browser_.get(); }
protected:
// Implementations of brightray::BrowserMainParts.
virtual brightray::BrowserContext* CreateBrowserContext() OVERRIDE;
// Implementations of content::BrowserMainParts.
virtual void PostEarlyInitialization() OVERRIDE;
virtual void PreMainMessageLoopStart() OVERRIDE;
virtual void PreMainMessageLoopRun() OVERRIDE;
virtual void PostDestroyThreads() OVERRIDE;
private:
scoped_ptr<AtomBrowserBindings> atom_bindings_;
scoped_ptr<Browser> browser_;
scoped_ptr<NodeBindings> node_bindings_;
static AtomBrowserMainParts* self_;
DISALLOW_COPY_AND_ASSIGN(AtomBrowserMainParts);
};
} // namespace atom
#endif // ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
|
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#define ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#include "brightray/browser/browser_main_parts.h"
namespace atom {
class AtomBrowserBindings;
class Browser;
class NodeBindings;
class AtomBrowserMainParts : public brightray::BrowserMainParts {
public:
AtomBrowserMainParts();
virtual ~AtomBrowserMainParts();
static AtomBrowserMainParts* Get();
AtomBrowserBindings* atom_bindings() { return atom_bindings_.get(); }
Browser* browser() { return browser_.get(); }
protected:
// Implementations of brightray::BrowserMainParts.
virtual brightray::BrowserContext* CreateBrowserContext() OVERRIDE;
// Implementations of content::BrowserMainParts.
virtual void PostEarlyInitialization() OVERRIDE;
virtual void PreMainMessageLoopRun() OVERRIDE;
#if defined(OS_MACOSX)
virtual void PreMainMessageLoopStart() OVERRIDE;
virtual void PostDestroyThreads() OVERRIDE;
#endif
private:
scoped_ptr<AtomBrowserBindings> atom_bindings_;
scoped_ptr<Browser> browser_;
scoped_ptr<NodeBindings> node_bindings_;
static AtomBrowserMainParts* self_;
DISALLOW_COPY_AND_ASSIGN(AtomBrowserMainParts);
};
} // namespace atom
#endif // ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
|
Mark OS X only implementations in BrowserMainParts.
|
Mark OS X only implementations in BrowserMainParts.
|
C
|
mit
|
Jacobichou/electron,michaelchiche/electron,fritx/electron,chrisswk/electron,twolfson/electron,eriser/electron,Jacobichou/electron,John-Lin/electron,gabriel/electron,simonfork/electron,d-salas/electron,aliib/electron,GoooIce/electron,adamjgray/electron,JesselJohn/electron,Faiz7412/electron,ervinb/electron,Floato/electron,Zagorakiss/electron,faizalpribadi/electron,sircharleswatson/electron,sky7sea/electron,nicobot/electron,mirrh/electron,minggo/electron,miniak/electron,joneit/electron,icattlecoder/electron,jlord/electron,bpasero/electron,micalan/electron,ianscrivener/electron,shockone/electron,gerhardberger/electron,xiruibing/electron,mjaniszew/electron,mhkeller/electron,rreimann/electron,DivyaKMenon/electron,biblerule/UMCTelnetHub,neutrous/electron,LadyNaggaga/electron,sky7sea/electron,LadyNaggaga/electron,baiwyc119/electron,LadyNaggaga/electron,wan-qy/electron,setzer777/electron,jjz/electron,seanchas116/electron,yalexx/electron,cos2004/electron,sircharleswatson/electron,chrisswk/electron,anko/electron,medixdev/electron,mattotodd/electron,lrlna/electron,electron/electron,GoooIce/electron,jlord/electron,smczk/electron,etiktin/electron,GoooIce/electron,brenca/electron,lzpfmh/electron,mirrh/electron,dkfiresky/electron,saronwei/electron,bbondy/electron,chrisswk/electron,tinydew4/electron,minggo/electron,natgolov/electron,adcentury/electron,soulteary/electron,biblerule/UMCTelnetHub,robinvandernoord/electron,takashi/electron,adcentury/electron,adcentury/electron,adamjgray/electron,bruce/electron,MaxWhere/electron,kokdemo/electron,aecca/electron,Andrey-Pavlov/electron,sshiting/electron,roadev/electron,bobwol/electron,MaxWhere/electron,Jonekee/electron,hokein/atom-shell,gbn972/electron,bright-sparks/electron,seanchas116/electron,mrwizard82d1/electron,kostia/electron,jhen0409/electron,stevemao/electron,stevemao/electron,jjz/electron,thingsinjars/electron,kikong/electron,Evercoder/electron,trigrass2/electron,twolfson/electron,wolfflow/electron,fireball-x/atom-shell,shaundunne/electron,evgenyzinoviev/electron,Andrey-Pavlov/electron,fireball-x/atom-shell,nicholasess/electron,shaundunne/electron,tinydew4/electron,edulan/electron,John-Lin/electron,anko/electron,jonatasfreitasv/electron,JussMee15/electron,voidbridge/electron,JesselJohn/electron,jlhbaseball15/electron,GoooIce/electron,jjz/electron,jannishuebl/electron,tonyganch/electron,IonicaBizauKitchen/electron,BionicClick/electron,stevekinney/electron,dkfiresky/electron,leolujuyi/electron,sircharleswatson/electron,gabriel/electron,d-salas/electron,thompsonemerson/electron,MaxGraey/electron,fireball-x/atom-shell,Ivshti/electron,medixdev/electron,jaanus/electron,Evercoder/electron,simongregory/electron,rajatsingla28/electron,twolfson/electron,cos2004/electron,astoilkov/electron,dongjoon-hyun/electron,jlhbaseball15/electron,Zagorakiss/electron,Jonekee/electron,Ivshti/electron,ervinb/electron,Neron-X5/electron,Jonekee/electron,mjaniszew/electron,takashi/electron,leftstick/electron,coderhaoxin/electron,pandoraui/electron,chriskdon/electron,mattdesl/electron,pirafrank/electron,Evercoder/electron,lzpfmh/electron,the-ress/electron,fabien-d/electron,farmisen/electron,Jonekee/electron,biblerule/UMCTelnetHub,cos2004/electron,matiasinsaurralde/electron,nicholasess/electron,abhishekgahlot/electron,voidbridge/electron,ervinb/electron,icattlecoder/electron,brave/electron,wan-qy/electron,jacksondc/electron,Zagorakiss/electron,dahal/electron,joaomoreno/atom-shell,thomsonreuters/electron,eriser/electron,simongregory/electron,bpasero/electron,sircharleswatson/electron,Rob
ertJGabriel/electron,preco21/electron,Floato/electron,carsonmcdonald/electron,JussMee15/electron,meowlab/electron,edulan/electron,posix4e/electron,shennushi/electron,matiasinsaurralde/electron,gabrielPeart/electron,kenmozi/electron,jannishuebl/electron,posix4e/electron,gamedevsam/electron,Neron-X5/electron,jsutcodes/electron,xfstudio/electron,bitemyapp/electron,setzer777/electron,jaanus/electron,Gerhut/electron,brave/electron,mirrh/electron,neutrous/electron,brave/electron,posix4e/electron,simonfork/electron,micalan/electron,preco21/electron,aichingm/electron,coderhaoxin/electron,biblerule/UMCTelnetHub,xiruibing/electron,xiruibing/electron,ankitaggarwal011/electron,rsvip/electron,joaomoreno/atom-shell,jjz/electron,kostia/electron,michaelchiche/electron,kazupon/electron,xiruibing/electron,shiftkey/electron,tylergibson/electron,deepak1556/atom-shell,cos2004/electron,Rokt33r/electron,anko/electron,d-salas/electron,bitemyapp/electron,aliib/electron,webmechanicx/electron,shaundunne/electron,ankitaggarwal011/electron,vaginessa/electron,saronwei/electron,RobertJGabriel/electron,webmechanicx/electron,pandoraui/electron,leftstick/electron,preco21/electron,jsutcodes/electron,eric-seekas/electron,meowlab/electron,shennushi/electron,ervinb/electron,jonatasfreitasv/electron,IonicaBizauKitchen/electron,egoist/electron,faizalpribadi/electron,jhen0409/electron,ankitaggarwal011/electron,arusakov/electron,tinydew4/electron,IonicaBizauKitchen/electron,gbn972/electron,sshiting/electron,LadyNaggaga/electron,mattdesl/electron,hokein/atom-shell,jtburke/electron,coderhaoxin/electron,aichingm/electron,RobertJGabriel/electron,minggo/electron,dahal/electron,kikong/electron,mrwizard82d1/electron,Andrey-Pavlov/electron,edulan/electron,roadev/electron,shockone/electron,pirafrank/electron,Faiz7412/electron,rhencke/electron,gstack/infinium-shell,simonfork/electron,iftekeriba/electron,jaanus/electron,shockone/electron,lrlna/electron,mhkeller/electron,benweissmann/electron,leolujuyi/electron,BionicClick/electron,roadev/electron,jtburke/electron,iftekeriba/electron,shiftkey/electron,faizalpribadi/electron,bruce/electron,kenmozi/electron,bright-sparks/electron,mhkeller/electron,MaxWhere/electron,Ivshti/electron,fomojola/electron,digideskio/electron,brave/muon,maxogden/atom-shell,oiledCode/electron,darwin/electron,arusakov/electron,Neron-X5/electron,xfstudio/electron,thompsonemerson/electron,anko/electron,benweissmann/electron,adamjgray/electron,yan-foto/electron,brave/muon,micalan/electron,mrwizard82d1/electron,davazp/electron,aecca/electron,pirafrank/electron,deepak1556/atom-shell,xfstudio/electron,gamedevsam/electron,systembugtj/electron,voidbridge/electron,kostia/electron,etiktin/electron,nicobot/electron,tinydew4/electron,yan-foto/electron,abhishekgahlot/electron,lzpfmh/electron,gerhardberger/electron,wan-qy/electron,shennushi/electron,kazupon/electron,deed02392/electron,SufianHassan/electron,pandoraui/electron,rajatsingla28/electron,mattotodd/electron,chriskdon/electron,jiaz/electron,jtburke/electron,thompsonemerson/electron,bwiggs/electron,mattotodd/electron,farmisen/electron,michaelchiche/electron,thompsonemerson/electron,bobwol/electron,eriser/electron,posix4e/electron,fritx/electron,soulteary/electron,evgenyzinoviev/electron,jlhbaseball15/electron,nekuz0r/electron,bright-sparks/electron,jhen0409/electron,thompsonemerson/electron,gbn972/electron,subblue/electron,darwin/electron,wolfflow/electron,voidbridge/electron,synaptek/electron,shaundunne/electron,robinvandernoord/electron,mubassirhayat/electron,jcblw/electron,wo
lfflow/electron,fabien-d/electron,trigrass2/electron,jacksondc/electron,joneit/electron,kenmozi/electron,rreimann/electron,bwiggs/electron,felixrieseberg/electron,simonfork/electron,biblerule/UMCTelnetHub,howmuchcomputer/electron,mattdesl/electron,tomashanacek/electron,the-ress/electron,thomsonreuters/electron,abhishekgahlot/electron,dahal/electron,noikiy/electron,seanchas116/electron,dahal/electron,oiledCode/electron,aecca/electron,bitemyapp/electron,aichingm/electron,stevekinney/electron,oiledCode/electron,leftstick/electron,sky7sea/electron,faizalpribadi/electron,destan/electron,mattdesl/electron,robinvandernoord/electron,LadyNaggaga/electron,darwin/electron,simongregory/electron,iftekeriba/electron,yan-foto/electron,howmuchcomputer/electron,sircharleswatson/electron,tincan24/electron,rprichard/electron,pandoraui/electron,destan/electron,jhen0409/electron,thingsinjars/electron,aliib/electron,jonatasfreitasv/electron,renaesop/electron,natgolov/electron,mrwizard82d1/electron,Zagorakiss/electron,timruffles/electron,jannishuebl/electron,Rokt33r/electron,synaptek/electron,brave/muon,stevekinney/electron,yalexx/electron,lzpfmh/electron,ianscrivener/electron,shiftkey/electron,brenca/electron,coderhaoxin/electron,dongjoon-hyun/electron,eric-seekas/electron,bpasero/electron,John-Lin/electron,posix4e/electron,zhakui/electron,rhencke/electron,GoooIce/electron,Jacobichou/electron,jonatasfreitasv/electron,medixdev/electron,wolfflow/electron,rajatsingla28/electron,tincan24/electron,aaron-goshine/electron,DivyaKMenon/electron,digideskio/electron,pandoraui/electron,neutrous/electron,ankitaggarwal011/electron,anko/electron,tincan24/electron,bitemyapp/electron,SufianHassan/electron,jcblw/electron,pirafrank/electron,gabriel/electron,jsutcodes/electron,mubassirhayat/electron,tomashanacek/electron,bitemyapp/electron,stevemao/electron,brave/muon,RIAEvangelist/electron,tomashanacek/electron,SufianHassan/electron,maxogden/atom-shell,rajatsingla28/electron,stevekinney/electron,Ivshti/electron,mjaniszew/electron,deed02392/electron,seanchas116/electron,kcrt/electron,roadev/electron,benweissmann/electron,aecca/electron,gabrielPeart/electron,saronwei/electron,bruce/electron,arturts/electron,wolfflow/electron,evgenyzinoviev/electron,kcrt/electron,vHanda/electron,abhishekgahlot/electron,Gerhut/electron,mrwizard82d1/electron,bbondy/electron,anko/electron,miniak/electron,baiwyc119/electron,kokdemo/electron,zhakui/electron,sshiting/electron,gabrielPeart/electron,sshiting/electron,aichingm/electron,greyhwndz/electron,matiasinsaurralde/electron,jaanus/electron,saronwei/electron,arturts/electron,shockone/electron,fabien-d/electron,yalexx/electron,mattotodd/electron,dkfiresky/electron,chriskdon/electron,simonfork/electron,edulan/electron,robinvandernoord/electron,vHanda/electron,benweissmann/electron,takashi/electron,rajatsingla28/electron,nicholasess/electron,smczk/electron,synaptek/electron,John-Lin/electron,Floato/electron,bpasero/electron,tinydew4/electron,deepak1556/atom-shell,jacksondc/electron,twolfson/electron,jlord/electron,JussMee15/electron,iftekeriba/electron,egoist/electron,brave/muon,micalan/electron,maxogden/atom-shell,gbn972/electron,aliib/electron,bitemyapp/electron,bobwol/electron,chriskdon/electron,gamedevsam/electron,icattlecoder/electron,electron/electron,arusakov/electron,electron/electron,astoilkov/electron,yalexx/electron,felixrieseberg/electron,arusakov/electron,bright-sparks/electron,thingsinjars/electron,renaesop/electron,nicholasess/electron,dongjoon-hyun/electron,saronwei/electron,takashi/electro
n,meowlab/electron,chrisswk/electron,cqqccqc/electron,Neron-X5/electron,joaomoreno/atom-shell,etiktin/electron,wan-qy/electron,jonatasfreitasv/electron,rajatsingla28/electron,destan/electron,BionicClick/electron,mattdesl/electron,yan-foto/electron,vHanda/electron,kenmozi/electron,timruffles/electron,brave/electron,gamedevsam/electron,tylergibson/electron,jacksondc/electron,mubassirhayat/electron,tonyganch/electron,mhkeller/electron,rprichard/electron,thingsinjars/electron,eric-seekas/electron,oiledCode/electron,kokdemo/electron,bobwol/electron,renaesop/electron,destan/electron,deepak1556/atom-shell,jcblw/electron,arusakov/electron,gabriel/electron,JesselJohn/electron,gstack/infinium-shell,gabriel/electron,adcentury/electron,brenca/electron,Faiz7412/electron,vaginessa/electron,matiasinsaurralde/electron,arusakov/electron,maxogden/atom-shell,felixrieseberg/electron,beni55/electron,thingsinjars/electron,voidbridge/electron,dahal/electron,noikiy/electron,mubassirhayat/electron,jlord/electron,cos2004/electron,lrlna/electron,Gerhut/electron,michaelchiche/electron,dongjoon-hyun/electron,MaxWhere/electron,leolujuyi/electron,jannishuebl/electron,RobertJGabriel/electron,neutrous/electron,benweissmann/electron,astoilkov/electron,kcrt/electron,kostia/electron,farmisen/electron,bruce/electron,rprichard/electron,nagyistoce/electron-atom-shell,sky7sea/electron,vipulroxx/electron,tylergibson/electron,vaginessa/electron,jsutcodes/electron,xiruibing/electron,baiwyc119/electron,kazupon/electron,fomojola/electron,IonicaBizauKitchen/electron,Ivshti/electron,bwiggs/electron,rsvip/electron,mubassirhayat/electron,LadyNaggaga/electron,nekuz0r/electron,jhen0409/electron,fritx/electron,deed02392/electron,chrisswk/electron,JussMee15/electron,astoilkov/electron,BionicClick/electron,SufianHassan/electron,fffej/electron,leftstick/electron,yalexx/electron,trankmichael/electron,nekuz0r/electron,tylergibson/electron,vHanda/electron,jsutcodes/electron,rsvip/electron,preco21/electron,meowlab/electron,Neron-X5/electron,GoooIce/electron,tinydew4/electron,fabien-d/electron,lrlna/electron,neutrous/electron,coderhaoxin/electron,mattdesl/electron,gabriel/electron,thomsonreuters/electron,RIAEvangelist/electron,DivyaKMenon/electron,evgenyzinoviev/electron,ianscrivener/electron,mirrh/electron,John-Lin/electron,dongjoon-hyun/electron,carsonmcdonald/electron,jiaz/electron,howmuchcomputer/electron,lrlna/electron,nicholasess/electron,vipulroxx/electron,nagyistoce/electron-atom-shell,eric-seekas/electron,stevemao/electron,etiktin/electron,yan-foto/electron,nagyistoce/electron-atom-shell,dkfiresky/electron,twolfson/electron,Andrey-Pavlov/electron,jlord/electron,fomojola/electron,DivyaKMenon/electron,systembugtj/electron,thomsonreuters/electron,thomsonreuters/electron,yan-foto/electron,miniak/electron,greyhwndz/electron,kikong/electron,ianscrivener/electron,medixdev/electron,nicobot/electron,bright-sparks/electron,deepak1556/atom-shell,MaxWhere/electron,baiwyc119/electron,tomashanacek/electron,rhencke/electron,xiruibing/electron,carsonmcdonald/electron,MaxGraey/electron,roadev/electron,arturts/electron,robinvandernoord/electron,takashi/electron,nicobot/electron,mhkeller/electron,jtburke/electron,systembugtj/electron,jaanus/electron,beni55/electron,bright-sparks/electron,kikong/electron,aecca/electron,arturts/electron,nicobot/electron,Jacobichou/electron,bruce/electron,biblerule/UMCTelnetHub,egoist/electron,smczk/electron,shockone/electron,zhakui/electron,felixrieseberg/electron,fireball-x/atom-shell,systembugtj/electron,shennushi/electron,rr
eimann/electron,shennushi/electron,arturts/electron,fritx/electron,jsutcodes/electron,Zagorakiss/electron,howmuchcomputer/electron,jlhbaseball15/electron,shiftkey/electron,gerhardberger/electron,destan/electron,micalan/electron,rhencke/electron,leftstick/electron,seanchas116/electron,gerhardberger/electron,noikiy/electron,christian-bromann/electron,gbn972/electron,Floato/electron,jannishuebl/electron,tomashanacek/electron,DivyaKMenon/electron,matiasinsaurralde/electron,cqqccqc/electron,kikong/electron,trankmichael/electron,systembugtj/electron,bobwol/electron,minggo/electron,davazp/electron,trigrass2/electron,gbn972/electron,jlhbaseball15/electron,beni55/electron,wolfflow/electron,farmisen/electron,mattotodd/electron,kazupon/electron,renaesop/electron,zhakui/electron,tonyganch/electron,noikiy/electron,sky7sea/electron,posix4e/electron,brenca/electron,jcblw/electron,ankitaggarwal011/electron,leethomas/electron,renaesop/electron,aaron-goshine/electron,evgenyzinoviev/electron,cqqccqc/electron,rreimann/electron,bpasero/electron,kokdemo/electron,adamjgray/electron,nekuz0r/electron,SufianHassan/electron,seanchas116/electron,digideskio/electron,Gerhut/electron,pombredanne/electron,baiwyc119/electron,renaesop/electron,SufianHassan/electron,beni55/electron,nagyistoce/electron-atom-shell,pirafrank/electron,kazupon/electron,trankmichael/electron,tincan24/electron,christian-bromann/electron,miniak/electron,RIAEvangelist/electron,Evercoder/electron,kenmozi/electron,JussMee15/electron,DivyaKMenon/electron,greyhwndz/electron,shaundunne/electron,the-ress/electron,nicobot/electron,fritx/electron,vHanda/electron,faizalpribadi/electron,setzer777/electron,webmechanicx/electron,fffej/electron,ankitaggarwal011/electron,dongjoon-hyun/electron,leftstick/electron,setzer777/electron,vaginessa/electron,trigrass2/electron,etiktin/electron,felixrieseberg/electron,mrwizard82d1/electron,leethomas/electron,brenca/electron,medixdev/electron,tincan24/electron,joneit/electron,shockone/electron,trankmichael/electron,arturts/electron,matiasinsaurralde/electron,John-Lin/electron,carsonmcdonald/electron,Floato/electron,fomojola/electron,bpasero/electron,Faiz7412/electron,bbondy/electron,JesselJohn/electron,gstack/infinium-shell,abhishekgahlot/electron,Jacobichou/electron,brave/electron,lrlna/electron,fabien-d/electron,gstack/infinium-shell,neutrous/electron,d-salas/electron,Jonekee/electron,joaomoreno/atom-shell,trigrass2/electron,oiledCode/electron,etiktin/electron,davazp/electron,fffej/electron,the-ress/electron,nekuz0r/electron,eric-seekas/electron,Neron-X5/electron,jcblw/electron,farmisen/electron,robinvandernoord/electron,Evercoder/electron,joaomoreno/atom-shell,miniak/electron,bwiggs/electron,stevemao/electron,stevekinney/electron,stevekinney/electron,vipulroxx/electron,ianscrivener/electron,smczk/electron,Andrey-Pavlov/electron,dkfiresky/electron,micalan/electron,beni55/electron,BionicClick/electron,jiaz/electron,jonatasfreitasv/electron,pombredanne/electron,pirafrank/electron,eric-seekas/electron,gamedevsam/electron,howmuchcomputer/electron,rreimann/electron,leolujuyi/electron,astoilkov/electron,vHanda/electron,soulteary/electron,kokdemo/electron,RIAEvangelist/electron,pombredanne/electron,kokdemo/electron,gerhardberger/electron,electron/electron,leethomas/electron,trigrass2/electron,JussMee15/electron,joneit/electron,thingsinjars/electron,MaxGraey/electron,the-ress/electron,dahal/electron,jannishuebl/electron,trankmichael/electron,simongregory/electron,leethomas/electron,greyhwndz/electron,christian-bromann/electron,bai
wyc119/electron,rhencke/electron,synaptek/electron,thompsonemerson/electron,faizalpribadi/electron,smczk/electron,rsvip/electron,minggo/electron,IonicaBizauKitchen/electron,darwin/electron,Rokt33r/electron,adamjgray/electron,sshiting/electron,carsonmcdonald/electron,tonyganch/electron,deed02392/electron,natgolov/electron,gerhardberger/electron,kcrt/electron,JesselJohn/electron,cos2004/electron,edulan/electron,kcrt/electron,tylergibson/electron,vipulroxx/electron,MaxWhere/electron,aaron-goshine/electron,Andrey-Pavlov/electron,iftekeriba/electron,aliib/electron,Faiz7412/electron,michaelchiche/electron,fritx/electron,jjz/electron,minggo/electron,ianscrivener/electron,mattotodd/electron,MaxGraey/electron,tonyganch/electron,destan/electron,oiledCode/electron,lzpfmh/electron,mhkeller/electron,stevemao/electron,roadev/electron,mirrh/electron,leethomas/electron,adcentury/electron,Evercoder/electron,trankmichael/electron,deed02392/electron,christian-bromann/electron,michaelchiche/electron,natgolov/electron,meowlab/electron,pombredanne/electron,greyhwndz/electron,fffej/electron,aaron-goshine/electron,mirrh/electron,RIAEvangelist/electron,fffej/electron,jcblw/electron,miniak/electron,gabrielPeart/electron,Rokt33r/electron,brave/electron,aaron-goshine/electron,soulteary/electron,rreimann/electron,hokein/atom-shell,fomojola/electron,pandoraui/electron,jaanus/electron,egoist/electron,icattlecoder/electron,BionicClick/electron,greyhwndz/electron,timruffles/electron,zhakui/electron,gabrielPeart/electron,simonfork/electron,cqqccqc/electron,subblue/electron,cqqccqc/electron,meowlab/electron,electron/electron,aecca/electron,Jonekee/electron,timruffles/electron,icattlecoder/electron,tomashanacek/electron,egoist/electron,vaginessa/electron,bbondy/electron,noikiy/electron,mjaniszew/electron,Floato/electron,thomsonreuters/electron,nekuz0r/electron,benweissmann/electron,wan-qy/electron,xfstudio/electron,electron/electron,xfstudio/electron,coderhaoxin/electron,kostia/electron,sshiting/electron,Gerhut/electron,digideskio/electron,carsonmcdonald/electron,webmechanicx/electron,RIAEvangelist/electron,maxogden/atom-shell,mjaniszew/electron,kcrt/electron,bwiggs/electron,edulan/electron,aaron-goshine/electron,icattlecoder/electron,Rokt33r/electron,bbondy/electron,jhen0409/electron,davazp/electron,webmechanicx/electron,Zagorakiss/electron,RobertJGabriel/electron,hokein/atom-shell,jlhbaseball15/electron,shaundunne/electron,d-salas/electron,ervinb/electron,howmuchcomputer/electron,preco21/electron,brave/muon,davazp/electron,christian-bromann/electron,electron/electron,christian-bromann/electron,jacksondc/electron,jiaz/electron,jiaz/electron,cqqccqc/electron,felixrieseberg/electron,timruffles/electron,natgolov/electron,fomojola/electron,evgenyzinoviev/electron,leolujuyi/electron,IonicaBizauKitchen/electron,jtburke/electron,leolujuyi/electron,lzpfmh/electron,tylergibson/electron,twolfson/electron,vaginessa/electron,xfstudio/electron,tonyganch/electron,gabrielPeart/electron,subblue/electron,medixdev/electron,saronwei/electron,zhakui/electron,nagyistoce/electron-atom-shell,takashi/electron,fffej/electron,adamjgray/electron,voidbridge/electron,Jacobichou/electron,darwin/electron,fireball-x/atom-shell,systembugtj/electron,jiaz/electron,Rokt33r/electron,deed02392/electron,bruce/electron,synaptek/electron,tincan24/electron,vipulroxx/electron,MaxGraey/electron,farmisen/electron,adcentury/electron,jjz/electron,chriskdon/electron,shiftkey/electron,rsvip/electron,gstack/infinium-shell,shennushi/electron,digideskio/electron,synaptek/elec
tron,joneit/electron,subblue/electron,leethomas/electron,astoilkov/electron,aichingm/electron,ervinb/electron,eriser/electron,natgolov/electron,simongregory/electron,JesselJohn/electron,subblue/electron,aichingm/electron,the-ress/electron,aliib/electron,wan-qy/electron,kostia/electron,shiftkey/electron,webmechanicx/electron,pombredanne/electron,digideskio/electron,abhishekgahlot/electron,chriskdon/electron,iftekeriba/electron,jtburke/electron,vipulroxx/electron,gamedevsam/electron,joneit/electron,rprichard/electron,jacksondc/electron,pombredanne/electron,eriser/electron,sircharleswatson/electron,preco21/electron,sky7sea/electron,rhencke/electron,yalexx/electron,mjaniszew/electron,joaomoreno/atom-shell,simongregory/electron,the-ress/electron,smczk/electron,kazupon/electron,setzer777/electron,kenmozi/electron,RobertJGabriel/electron,d-salas/electron,soulteary/electron,dkfiresky/electron,bpasero/electron,noikiy/electron,brenca/electron,gerhardberger/electron,setzer777/electron,egoist/electron,Gerhut/electron,bwiggs/electron,davazp/electron,nicholasess/electron,hokein/atom-shell,bobwol/electron,beni55/electron,bbondy/electron,eriser/electron,subblue/electron,soulteary/electron
|
c
|
## Code Before:
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#define ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#include "brightray/browser/browser_main_parts.h"
namespace atom {
class AtomBrowserBindings;
class Browser;
class NodeBindings;
class AtomBrowserMainParts : public brightray::BrowserMainParts {
public:
AtomBrowserMainParts();
virtual ~AtomBrowserMainParts();
static AtomBrowserMainParts* Get();
AtomBrowserBindings* atom_bindings() { return atom_bindings_.get(); }
Browser* browser() { return browser_.get(); }
protected:
// Implementations of brightray::BrowserMainParts.
virtual brightray::BrowserContext* CreateBrowserContext() OVERRIDE;
// Implementations of content::BrowserMainParts.
virtual void PostEarlyInitialization() OVERRIDE;
virtual void PreMainMessageLoopStart() OVERRIDE;
virtual void PreMainMessageLoopRun() OVERRIDE;
virtual void PostDestroyThreads() OVERRIDE;
private:
scoped_ptr<AtomBrowserBindings> atom_bindings_;
scoped_ptr<Browser> browser_;
scoped_ptr<NodeBindings> node_bindings_;
static AtomBrowserMainParts* self_;
DISALLOW_COPY_AND_ASSIGN(AtomBrowserMainParts);
};
} // namespace atom
#endif // ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
## Instruction:
Mark OS X only implementations in BrowserMainParts.
## Code After:
// Copyright (c) 2013 GitHub, Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#define ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
#include "brightray/browser/browser_main_parts.h"
namespace atom {
class AtomBrowserBindings;
class Browser;
class NodeBindings;
class AtomBrowserMainParts : public brightray::BrowserMainParts {
public:
AtomBrowserMainParts();
virtual ~AtomBrowserMainParts();
static AtomBrowserMainParts* Get();
AtomBrowserBindings* atom_bindings() { return atom_bindings_.get(); }
Browser* browser() { return browser_.get(); }
protected:
// Implementations of brightray::BrowserMainParts.
virtual brightray::BrowserContext* CreateBrowserContext() OVERRIDE;
// Implementations of content::BrowserMainParts.
virtual void PostEarlyInitialization() OVERRIDE;
virtual void PreMainMessageLoopRun() OVERRIDE;
#if defined(OS_MACOSX)
virtual void PreMainMessageLoopStart() OVERRIDE;
virtual void PostDestroyThreads() OVERRIDE;
#endif
private:
scoped_ptr<AtomBrowserBindings> atom_bindings_;
scoped_ptr<Browser> browser_;
scoped_ptr<NodeBindings> node_bindings_;
static AtomBrowserMainParts* self_;
DISALLOW_COPY_AND_ASSIGN(AtomBrowserMainParts);
};
} // namespace atom
#endif // ATOM_BROWSER_ATOM_BROWSER_MAIN_PARTS_
|
# ... existing code ...
// Implementations of content::BrowserMainParts.
virtual void PostEarlyInitialization() OVERRIDE;
virtual void PreMainMessageLoopRun() OVERRIDE;
#if defined(OS_MACOSX)
virtual void PreMainMessageLoopStart() OVERRIDE;
virtual void PostDestroyThreads() OVERRIDE;
#endif
private:
scoped_ptr<AtomBrowserBindings> atom_bindings_;
# ... rest of the code ...
|
15739812df972e8d79c5f9ff1ce1367c9bb7d168
|
src/test/java/com/fewlaps/quitnowsleepingtime/DefaultValuesTest.java
|
src/test/java/com/fewlaps/quitnowsleepingtime/DefaultValuesTest.java
|
package com.fewlaps.quitnowsleepingtime;
import org.junit.Test;
import java.util.Locale;
import static org.junit.Assert.assertEquals;
public class DefaultValuesTest {
@Test
public void shouldUseLocalLocaleByDefault() {
SleepingTime defaultLocaleST = new SleepingTime();
SleepingTime localLocaleST = new SleepingTime(Locale.getDefault().getCountry());
assertEquals(localLocaleST.getBedtime(), defaultLocaleST.getBedtime());
assertEquals(localLocaleST.getWakeUp(), defaultLocaleST.getWakeUp());
}
}
|
package com.fewlaps.quitnowsleepingtime;
import org.junit.Test;
import java.util.Locale;
import static com.fewlaps.quitnowsleepingtime.SleepingZone.DEFAULT_COUNTRY_CODE;
import static org.junit.Assert.assertEquals;
public class DefaultValuesTest {
@Test
public void shouldUseLocalLocaleByDefault() {
SleepingTime defaultLocaleST = new SleepingTime();
SleepingTime localLocaleST = new SleepingTime(Locale.getDefault().getCountry());
assertEquals(localLocaleST.getBedtime(), defaultLocaleST.getBedtime());
assertEquals(localLocaleST.getWakeUp(), defaultLocaleST.getWakeUp());
}
@Test
public void shouldUseDefaultCountryCodeIfNotExistingLocale() {
SleepingTime defaultCountryCodeSleepingTime = new SleepingTime(DEFAULT_COUNTRY_CODE);
SleepingTime wrongLocaleSleepingTime = new SleepingTime("XX");
assertEquals(wrongLocaleSleepingTime.getBedtime(), defaultCountryCodeSleepingTime.getBedtime());
assertEquals(wrongLocaleSleepingTime.getWakeUp(), defaultCountryCodeSleepingTime.getWakeUp());
}
}
|
Add tests for non-existing country codes

|
Add tests for non-existing country codes
|
Java
|
mit
|
Fewlaps/quitnow-sleep-time
|
java
|
## Code Before:
package com.fewlaps.quitnowsleepingtime;
import org.junit.Test;
import java.util.Locale;
import static org.junit.Assert.assertEquals;
public class DefaultValuesTest {
@Test
public void shouldUseLocalLocaleByDefault() {
SleepingTime defaultLocaleST = new SleepingTime();
SleepingTime localLocaleST = new SleepingTime(Locale.getDefault().getCountry());
assertEquals(localLocaleST.getBedtime(), defaultLocaleST.getBedtime());
assertEquals(localLocaleST.getWakeUp(), defaultLocaleST.getWakeUp());
}
}
## Instruction:
Add tests for non-existing country codes
## Code After:
package com.fewlaps.quitnowsleepingtime;
import org.junit.Test;
import java.util.Locale;
import static com.fewlaps.quitnowsleepingtime.SleepingZone.DEFAULT_COUNTRY_CODE;
import static org.junit.Assert.assertEquals;
public class DefaultValuesTest {
@Test
public void shouldUseLocalLocaleByDefault() {
SleepingTime defaultLocaleST = new SleepingTime();
SleepingTime localLocaleST = new SleepingTime(Locale.getDefault().getCountry());
assertEquals(localLocaleST.getBedtime(), defaultLocaleST.getBedtime());
assertEquals(localLocaleST.getWakeUp(), defaultLocaleST.getWakeUp());
}
@Test
public void shouldUseDefaultCountryCodeIfNotExistingLocale() {
SleepingTime defaultCountryCodeSleepingTime = new SleepingTime(DEFAULT_COUNTRY_CODE);
SleepingTime wrongLocaleSleepingTime = new SleepingTime("XX");
assertEquals(wrongLocaleSleepingTime.getBedtime(), defaultCountryCodeSleepingTime.getBedtime());
assertEquals(wrongLocaleSleepingTime.getWakeUp(), defaultCountryCodeSleepingTime.getWakeUp());
}
}
|
...
import java.util.Locale;
import static com.fewlaps.quitnowsleepingtime.SleepingZone.DEFAULT_COUNTRY_CODE;
import static org.junit.Assert.assertEquals;
public class DefaultValuesTest {
...
assertEquals(localLocaleST.getBedtime(), defaultLocaleST.getBedtime());
assertEquals(localLocaleST.getWakeUp(), defaultLocaleST.getWakeUp());
}
@Test
public void shouldUseDefaultCountryCodeIfNotExistingLocale() {
SleepingTime defaultCountryCodeSleepingTime = new SleepingTime(DEFAULT_COUNTRY_CODE);
SleepingTime wrongLocaleSleepingTime = new SleepingTime("XX");
assertEquals(wrongLocaleSleepingTime.getBedtime(), defaultCountryCodeSleepingTime.getBedtime());
assertEquals(wrongLocaleSleepingTime.getWakeUp(), defaultCountryCodeSleepingTime.getWakeUp());
}
}
...
|
8a4b576d6df4ef1f174c8698ff9a86dbf2f5bd4a
|
workshops/models.py
|
workshops/models.py
|
from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5)
|
from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5) if self.price else None
|
Check price exists before using it
|
Check price exists before using it
|
Python
|
bsd-3-clause
|
WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web
|
python
|
## Code Before:
from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5)
## Instruction:
Check price exists before using it
## Code After:
from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5) if self.price else None
|
...
@property
def approximate_euro_price(self):
return int(self.price / 7.5) if self.price else None
...
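The conditional expression above is the whole fix: with no price set, the property now returns None instead of raising a TypeError on None / 7.5. A minimal sketch of the same guard outside Django, runnable on its own; the WorkshopStub class and the 7.5 HRK-to-EUR rate are illustrative assumptions, not part of the project:
HRK_TO_EUR = 7.5  # assumed rate, mirrors the divisor used in the model above

class WorkshopStub:
    """Plain-Python stand-in for the Django model, for illustration only."""
    def __init__(self, price=None):
        self.price = price

    @property
    def approximate_euro_price(self):
        # Same guard as the commit: a falsy price (None or 0) yields None
        return int(self.price / HRK_TO_EUR) if self.price else None

assert WorkshopStub(750).approximate_euro_price == 100
assert WorkshopStub().approximate_euro_price is None
assert WorkshopStub(0).approximate_euro_price is None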
|
5a3dbe0367b7e734ea3668b33aa556a15b49e4b5
|
gwt-example/src/main/java/org/realityforge/arez/gwt/examples/ExampleUtil.java
|
gwt-example/src/main/java/org/realityforge/arez/gwt/examples/ExampleUtil.java
|
package org.realityforge.arez.gwt.examples;
import elemental2.dom.DomGlobal;
import javax.annotation.Nonnull;
import org.realityforge.arez.Arez;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.extras.WhyRun;
final class ExampleUtil
{
private ExampleUtil()
{
}
static void whyRun()
{
DomGlobal.console.log( new WhyRun( Arez.context().getSpy() ).whyRun() );
}
static void logAllErrors( @Nonnull final ArezContext context )
{
context.addObserverErrorHandler( ( observer, error, throwable ) -> {
DomGlobal.console.log( "Observer error: " + error + "\nobserver: " + observer );
if ( null != throwable )
{
DomGlobal.console.log( throwable );
}
} );
}
}
|
package org.realityforge.arez.gwt.examples;
import elemental2.dom.DomGlobal;
import javax.annotation.Nonnull;
import org.realityforge.arez.Arez;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.extras.WhyRun;
final class ExampleUtil
{
private ExampleUtil()
{
}
static void spyEvents()
{
Arez.context().getSpy().addSpyEventHandler( SpyUtil::emitEvent );
}
static void whyRun()
{
DomGlobal.console.log( new WhyRun( Arez.context().getSpy() ).whyRun() );
}
static void logAllErrors( @Nonnull final ArezContext context )
{
context.addObserverErrorHandler( ( observer, error, throwable ) -> {
DomGlobal.console.log( "Observer error: " + error + "\nobserver: " + observer );
if ( null != throwable )
{
DomGlobal.console.log( throwable );
}
} );
}
}
|
Add utility to spy events
|
Add utility to spy events
|
Java
|
apache-2.0
|
realityforge/arez,realityforge/arez,realityforge/arez
|
java
|
## Code Before:
package org.realityforge.arez.gwt.examples;
import elemental2.dom.DomGlobal;
import javax.annotation.Nonnull;
import org.realityforge.arez.Arez;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.extras.WhyRun;
final class ExampleUtil
{
private ExampleUtil()
{
}
static void whyRun()
{
DomGlobal.console.log( new WhyRun( Arez.context().getSpy() ).whyRun() );
}
static void logAllErrors( @Nonnull final ArezContext context )
{
context.addObserverErrorHandler( ( observer, error, throwable ) -> {
DomGlobal.console.log( "Observer error: " + error + "\nobserver: " + observer );
if ( null != throwable )
{
DomGlobal.console.log( throwable );
}
} );
}
}
## Instruction:
Add utility to spy events
## Code After:
package org.realityforge.arez.gwt.examples;
import elemental2.dom.DomGlobal;
import javax.annotation.Nonnull;
import org.realityforge.arez.Arez;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.extras.WhyRun;
final class ExampleUtil
{
private ExampleUtil()
{
}
static void spyEvents()
{
Arez.context().getSpy().addSpyEventHandler( SpyUtil::emitEvent );
}
static void whyRun()
{
DomGlobal.console.log( new WhyRun( Arez.context().getSpy() ).whyRun() );
}
static void logAllErrors( @Nonnull final ArezContext context )
{
context.addObserverErrorHandler( ( observer, error, throwable ) -> {
DomGlobal.console.log( "Observer error: " + error + "\nobserver: " + observer );
if ( null != throwable )
{
DomGlobal.console.log( throwable );
}
} );
}
}
|
# ... existing code ...
{
private ExampleUtil()
{
}
static void spyEvents()
{
Arez.context().getSpy().addSpyEventHandler( SpyUtil::emitEvent );
}
static void whyRun()
# ... rest of the code ...
|
28b067ab7fc7385ac5462eb6c9f9371cef9eb496
|
ritter/dataprocessors/annotators.py
|
ritter/dataprocessors/annotators.py
|
import re
class ArtifactAnnotator:
def linkify_artifacts(marked_tree, artifacts):
big_string = ArtifactAnnotator._marked_tree_to_big_string(marked_tree)
for artifact in artifacts:
link = '(%s "GHOSTDOC-TOKEN")' % artifact['_id']
for token in artifact['tokens']:
reg = ArtifactAnnotator._token_reg(token)
repl = r'[\1]%s' % link
big_string = reg.sub(repl, big_string)
ArtifactAnnotator._big_string_to_marked_tree(marked_tree, big_string)
return marked_tree
def _token_reg(token):
reg = r'(\b%s)' % token
return re.compile(reg, re.IGNORECASE)
def _marked_tree_to_big_string(marked_tree):
strings = []
for item in marked_tree:
if 'text' in item and item['type'] != 'heading' and item[
'type'] != 'code':
strings.append(item['text'])
big_string = u'\u1394'.join(strings)
return big_string
def _big_string_to_marked_tree(marked_tree, big_string):
strings = big_string.split(u'\u1394')
i = 0
for item in marked_tree:
if 'text' in item and item['type'] != 'heading' and item[
'type'] != 'code':
item['text'] = strings[i]
i = i + 1
|
import re
class ArtifactAnnotator:
excluded_types = set(['heading', 'code'])
def linkify_artifacts(marked_tree, artifacts):
big_string = ArtifactAnnotator._marked_tree_to_big_string(marked_tree)
for artifact in artifacts:
link = '(%s "GHOSTDOC-TOKEN")' % artifact['_id']
for token in artifact['tokens']:
reg = ArtifactAnnotator._token_reg(token)
repl = r'[\1]%s' % link
big_string = reg.sub(repl, big_string)
ArtifactAnnotator._big_string_to_marked_tree(marked_tree, big_string)
return marked_tree
def _token_reg(token):
reg = r'(\b%s)' % token
return re.compile(reg, re.IGNORECASE)
def _marked_tree_to_big_string(marked_tree):
strings = []
for item in marked_tree:
if 'text' in item and item['type'] not in ArtifactAnnotator.excluded_types:
strings.append(item['text'])
big_string = u'\u1394'.join(strings)
return big_string
def _big_string_to_marked_tree(marked_tree, big_string):
strings = big_string.split(u'\u1394')
i = 0
for item in marked_tree:
        if 'text' in item and item['type'] not in ArtifactAnnotator.excluded_types:
item['text'] = strings[i]
i = i + 1
|
Improve annotating of code segments
|
feat: Improve annotating of code segments
|
Python
|
mit
|
ErikGartner/ghostdoc-ritter
|
python
|
## Code Before:
import re
class ArtifactAnnotator:
def linkify_artifacts(marked_tree, artifacts):
big_string = ArtifactAnnotator._marked_tree_to_big_string(marked_tree)
for artifact in artifacts:
link = '(%s "GHOSTDOC-TOKEN")' % artifact['_id']
for token in artifact['tokens']:
reg = ArtifactAnnotator._token_reg(token)
repl = r'[\1]%s' % link
big_string = reg.sub(repl, big_string)
ArtifactAnnotator._big_string_to_marked_tree(marked_tree, big_string)
return marked_tree
def _token_reg(token):
reg = r'(\b%s)' % token
return re.compile(reg, re.IGNORECASE)
def _marked_tree_to_big_string(marked_tree):
strings = []
for item in marked_tree:
if 'text' in item and item['type'] != 'heading' and item[
'type'] != 'code':
strings.append(item['text'])
big_string = u'\u1394'.join(strings)
return big_string
def _big_string_to_marked_tree(marked_tree, big_string):
strings = big_string.split(u'\u1394')
i = 0
for item in marked_tree:
if 'text' in item and item['type'] != 'heading' and item[
'type'] != 'code':
item['text'] = strings[i]
i = i + 1
## Instruction:
feat: Improve annotating of code segments
## Code After:
import re
class ArtifactAnnotator:
excluded_types = set(['heading', 'code'])
def linkify_artifacts(marked_tree, artifacts):
big_string = ArtifactAnnotator._marked_tree_to_big_string(marked_tree)
for artifact in artifacts:
link = '(%s "GHOSTDOC-TOKEN")' % artifact['_id']
for token in artifact['tokens']:
reg = ArtifactAnnotator._token_reg(token)
repl = r'[\1]%s' % link
big_string = reg.sub(repl, big_string)
ArtifactAnnotator._big_string_to_marked_tree(marked_tree, big_string)
return marked_tree
def _token_reg(token):
reg = r'(\b%s)' % token
return re.compile(reg, re.IGNORECASE)
def _marked_tree_to_big_string(marked_tree):
strings = []
for item in marked_tree:
if 'text' in item and item['type'] not in ArtifactAnnotator.excluded_types:
strings.append(item['text'])
big_string = u'\u1394'.join(strings)
return big_string
def _big_string_to_marked_tree(marked_tree, big_string):
strings = big_string.split(u'\u1394')
i = 0
for item in marked_tree:
        if 'text' in item and item['type'] not in ArtifactAnnotator.excluded_types:
item['text'] = strings[i]
i = i + 1
|
// ... existing code ...
class ArtifactAnnotator:
excluded_types = set(['heading', 'code'])
def linkify_artifacts(marked_tree, artifacts):
big_string = ArtifactAnnotator._marked_tree_to_big_string(marked_tree)
// ... modified code ...
def _marked_tree_to_big_string(marked_tree):
strings = []
for item in marked_tree:
if 'text' in item and item['type'] not in ArtifactAnnotator.excluded_types:
strings.append(item['text'])
big_string = u'\u1394'.join(strings)
return big_string
...
strings = big_string.split(u'\u1394')
i = 0
for item in marked_tree:
        if 'text' in item and item['type'] not in ArtifactAnnotator.excluded_types:
item['text'] = strings[i]
i = i + 1
// ... rest of the code ...
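As a quick illustration of the linkification the class performs, the self-contained snippet below applies the same word-boundary, case-insensitive substitution to a made-up token and artifact id; the added re.escape call is a safety refinement assumed here, not something the commit includes:
import re

def linkify(text, token, artifact_id):
    # \b avoids partial-word hits; group 1 keeps the original casing of the match
    reg = re.compile(r'(\b%s)' % re.escape(token), re.IGNORECASE)
    return reg.sub(r'[\1](%s "GHOSTDOC-TOKEN")' % artifact_id, text)

print(linkify("The Engine drives the engine room.", "engine", "abc123"))
# The [Engine](abc123 "GHOSTDOC-TOKEN") drives the [engine](abc123 "GHOSTDOC-TOKEN") room.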
|
2e1f4ffa667bcff2c10caf64be345f3e8619232f
|
python/simple_types.py
|
python/simple_types.py
|
assert(type(5) == int)
assert(type(True) == bool)
assert(type(5.7) == float)
assert(type(9 + 5j) == complex)
assert(type((8, 'dog', False)) == tuple)
assert(type('hello') == str)
assert(type(b'hello') == bytes)
assert(type([1, '', False]) == list)
assert(type(range(1,10)) == range)
assert(type({1, 2, 3}) == set)
assert(type(frozenset([1, 2, 3])) == frozenset)
assert(type({'x': 1, 'y': 2}) == dict)
assert(type(slice([1, 2, 3])) == slice)
# Some do not, but we can still "see" them
assert(str(type(None)) == "<class 'NoneType'>")
assert(str(type(NotImplemented)) == "<class 'NotImplementedType'>")
# Built-in vs. User-defined functions
def plus_two(x):
return x + 2
assert(str(type(plus_two)) == "<class 'function'>")
assert(str(type(max)) == "<class 'builtin_function_or_method'>")
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
|
assert(type(5) == int)
assert(type(True) == bool)
assert(type(5.7) == float)
assert(type(9 + 5j) == complex)
assert(type((8, 'dog', False)) == tuple)
assert(type('hello') == str)
assert(type(b'hello') == bytes)
assert(type([1, '', False]) == list)
assert(type(range(1,10)) == range)
assert(type({1, 2, 3}) == set)
assert(type(frozenset([1, 2, 3])) == frozenset)
assert(type({'x': 1, 'y': 2}) == dict)
assert(type(slice([1, 2, 3])) == slice)
# Some do not, but we can still "see" them
assert(str(type(None)) == "<class 'NoneType'>")
assert(str(type(NotImplemented)) == "<class 'NotImplementedType'>")
# Built-in vs. User-defined functions
def plus_two(x):
return x + 2
assert(str(type(plus_two)) == "<class 'function'>")
assert(str(type(max)) == "<class 'builtin_function_or_method'>")
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
# Many built-in modules define their own types
from datetime import date
assert(type(date(1969,7,20)) == date)
|
Add example of date type in Python
|
Add example of date type in Python
|
Python
|
mit
|
rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot
|
python
|
## Code Before:
assert(type(5) == int)
assert(type(True) == bool)
assert(type(5.7) == float)
assert(type(9 + 5j) == complex)
assert(type((8, 'dog', False)) == tuple)
assert(type('hello') == str)
assert(type(b'hello') == bytes)
assert(type([1, '', False]) == list)
assert(type(range(1,10)) == range)
assert(type({1, 2, 3}) == set)
assert(type(frozenset([1, 2, 3])) == frozenset)
assert(type({'x': 1, 'y': 2}) == dict)
assert(type(slice([1, 2, 3])) == slice)
# Some do not, but we can still "see" them
assert(str(type(None)) == "<class 'NoneType'>")
assert(str(type(NotImplemented)) == "<class 'NotImplementedType'>")
# Built-in vs. User-defined functions
def plus_two(x):
return x + 2
assert(str(type(plus_two)) == "<class 'function'>")
assert(str(type(max)) == "<class 'builtin_function_or_method'>")
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
## Instruction:
Add example of date type in Python
## Code After:
assert(type(5) == int)
assert(type(True) == bool)
assert(type(5.7) == float)
assert(type(9 + 5j) == complex)
assert(type((8, 'dog', False)) == tuple)
assert(type('hello') == str)
assert(type(b'hello') == bytes)
assert(type([1, '', False]) == list)
assert(type(range(1,10)) == range)
assert(type({1, 2, 3}) == set)
assert(type(frozenset([1, 2, 3])) == frozenset)
assert(type({'x': 1, 'y': 2}) == dict)
assert(type(slice([1, 2, 3])) == slice)
# Some do not, but we can still "see" them
assert(str(type(None)) == "<class 'NoneType'>")
assert(str(type(NotImplemented)) == "<class 'NotImplementedType'>")
# Built-in vs. User-defined functions
def plus_two(x):
return x + 2
assert(str(type(plus_two)) == "<class 'function'>")
assert(str(type(max)) == "<class 'builtin_function_or_method'>")
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
# Many built-in modules define their own types
from datetime import date
assert(type(date(1969,7,20)) == date)
|
// ... existing code ...
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
# Many built-in modules define their own types
from datetime import date
assert(type(date(1969,7,20)) == date)
// ... rest of the code ...
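Two more checks in the same vein, added here only as a hedged illustration of how inheritance interacts with these comparisons: datetime counts as its own type for type(), yet isinstance() still accepts it as a date because datetime subclasses date.
from datetime import date, datetime

assert type(datetime(1969, 7, 20, 20, 17)) == datetime
assert type(datetime(1969, 7, 20, 20, 17)) != date      # type() ignores subclassing
assert isinstance(datetime(1969, 7, 20, 20, 17), date)  # isinstance() does not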
|
aca7c4ef6998786abfd2119fee26e1d94d501c16
|
_build/drake-build.py
|
_build/drake-build.py
|
import sys
sys.path.append('../src')
import drake
drake.run('..')
|
import os
import sys
sys.path.append('../src')
if 'PYTHONPATH' in os.environ:
os.environ['PYTHONPATH'] = '../src:%s' % os.environ['PYTHONPATH']
else:
os.environ['PYTHONPATH'] = '../src'
import drake
drake.run('..')
|
Add drake to the PYTHONPATH for the tests.
|
Add drake to the PYTHONPATH for the tests.
|
Python
|
agpl-3.0
|
mefyl/drake,mefyl/drake,mefyl/drake,infinit/drake,infinit/drake,infinit/drake
|
python
|
## Code Before:
import sys
sys.path.append('../src')
import drake
drake.run('..')
## Instruction:
Add drake to the PYTHONPATH for the tests.
## Code After:
import os
import sys
sys.path.append('../src')
if 'PYTHONPATH' in os.environ:
os.environ['PYTHONPATH'] = '../src:%s' % os.environ['PYTHONPATH']
else:
os.environ['PYTHONPATH'] = '../src'
import drake
drake.run('..')
|
# ... existing code ...
import os
import sys
sys.path.append('../src')
if 'PYTHONPATH' in os.environ:
os.environ['PYTHONPATH'] = '../src:%s' % os.environ['PYTHONPATH']
else:
os.environ['PYTHONPATH'] = '../src'
import drake
# ... rest of the code ...
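One detail worth noting about the change: the ':' separator is hard-coded, which is fine for the POSIX targets drake builds on but would break on Windows. A small portable variant using os.pathsep, offered as a possible refinement rather than as what the commit does:
import os

def prepend_to_pythonpath(path):
    # os.pathsep is ':' on POSIX and ';' on Windows
    existing = os.environ.get('PYTHONPATH')
    os.environ['PYTHONPATH'] = path if not existing else path + os.pathsep + existing

prepend_to_pythonpath('../src')
print(os.environ['PYTHONPATH'])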
|
37f28dba866ffa3457a4f14a7d3e74e8e88a1dd0
|
testing/get_value_test.py
|
testing/get_value_test.py
|
from __future__ import print_function
import sys
import numpy as np
from bmi import MyBMI
def print_var_values (bmi, var_name):
s = ', '.join ([str (x) for x in bmi.get_value (var_name)])
print ('%s' % s)
def run ():
bmi = MyBMI ()
bmi.initialize (None)
print ('%s' % bmi.get_component_name ())
for i in range (10):
print ('Time %d: ' % i, end='')
print_var_values (bmi, 'height_above_sea_floor')
bmi.update ()
print ('Time %d: ' % i, end='')
print_var_values (bmi, 'height_above_sea_floor')
bmi.finalize ()
if __name__ == '__main__':
run ()
|
from __future__ import print_function
import sys
import numpy as np
from poisson import BmiPoisson
def main():
model = BmiPoisson()
model.initialize()
print('%s' % model.get_component_name ())
for i in xrange(10):
print('Time %d' % i)
np.savetxt(sys.stdout, model.get_value('land_surface__elevation'),
fmt='%.3f')
model.update()
print('Time %d' % i)
np.savetxt(sys.stdout, model.get_value('land_surface__elevation'),
fmt='%.3f')
model.finalize()
if __name__ == '__main__':
main()
|
Update to use new bmi model.
|
Update to use new bmi model.
|
Python
|
mit
|
mperignon/bmi-STM,mperignon/bmi-delta,mperignon/bmi-STM,mperignon/bmi-delta
|
python
|
## Code Before:
from __future__ import print_function
import sys
import numpy as np
from bmi import MyBMI
def print_var_values (bmi, var_name):
s = ', '.join ([str (x) for x in bmi.get_value (var_name)])
print ('%s' % s)
def run ():
bmi = MyBMI ()
bmi.initialize (None)
print ('%s' % bmi.get_component_name ())
for i in range (10):
print ('Time %d: ' % i, end='')
print_var_values (bmi, 'height_above_sea_floor')
bmi.update ()
print ('Time %d: ' % i, end='')
print_var_values (bmi, 'height_above_sea_floor')
bmi.finalize ()
if __name__ == '__main__':
run ()
## Instruction:
Update to use new bmi model.
## Code After:
from __future__ import print_function
import sys
import numpy as np
from poisson import BmiPoisson
def main():
model = BmiPoisson()
model.initialize()
print('%s' % model.get_component_name ())
for i in xrange(10):
print('Time %d' % i)
np.savetxt(sys.stdout, model.get_value('land_surface__elevation'),
fmt='%.3f')
model.update()
print('Time %d' % i)
np.savetxt(sys.stdout, model.get_value('land_surface__elevation'),
fmt='%.3f')
model.finalize()
if __name__ == '__main__':
main()
|
// ... existing code ...
import sys
import numpy as np
from poisson import BmiPoisson
def main():
model = BmiPoisson()
model.initialize()
print('%s' % model.get_component_name ())
for i in xrange(10):
print('Time %d' % i)
np.savetxt(sys.stdout, model.get_value('land_surface__elevation'),
fmt='%.3f')
model.update()
print('Time %d' % i)
np.savetxt(sys.stdout, model.get_value('land_surface__elevation'),
fmt='%.3f')
model.finalize()
if __name__ == '__main__':
main()
// ... rest of the code ...
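The np.savetxt(sys.stdout, ...) call above prints each value of the returned array on its own line with the given format. A tiny standalone demo of that pattern, assuming a reasonably recent NumPy and using an in-memory buffer and a made-up array instead of the model:
import io
import numpy as np

values = np.array([1.25, 2.5, 3.75])   # stand-in for model.get_value(...)
buf = io.StringIO()
np.savetxt(buf, values, fmt='%.3f')    # 1-D input: one formatted value per line
print(buf.getvalue(), end='')
# 1.250
# 2.500
# 3.750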
|
ddb5650f2820329f818cc57859610db1ccc45434
|
src/hid2hci.c
|
src/hid2hci.c
|
int main (int argc, char ** argv) {
char data[] = { 0x01, 0x05, 0, 0, 0, 0, 0, 0, 0 };
libusb_init(NULL);
libusb_device_handle* h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100b);
if (!h) {
printf("No device in HID mode found\n");
} else {
libusb_detach_kernel_driver(h, 0);
printf("%d\n", libusb_claim_interface(h, 0));
libusb_control_transfer(h, LIBUSB_ENDPOINT_OUT|LIBUSB_REQUEST_TYPE_CLASS|LIBUSB_RECIPIENT_INTERFACE, LIBUSB_REQUEST_GET_CONFIGURATION, 0x0301, 0, data, 9, 10000);
libusb_release_interface(h, 0);
libusb_close(h);
}
libusb_exit(NULL);
return 0;
}
|
int main (int argc, char ** argv) {
char data[] = { 0x01, 0x05, 0, 0, 0, 0, 0, 0, 0 };
libusb_init(NULL);
/* using the default pskeys, devices from the factory are a12:100d in HID mode */
libusb_device_handle* h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100b);
if (!h)
/* Alternatively, a12:100c can be set by the dongler to prevent CSR's software
stack from auto-switching to HCI mode */
h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100c);
if (!h) {
printf("No device in HID mode found\n");
} else {
libusb_detach_kernel_driver(h, 0);
printf("This should say 0: %d\n", libusb_claim_interface(h, 0));
libusb_control_transfer(h, LIBUSB_ENDPOINT_OUT|LIBUSB_REQUEST_TYPE_CLASS|LIBUSB_RECIPIENT_INTERFACE, LIBUSB_REQUEST_SET_CONFIGURATION, 0x0301, 0, data, 9, 10000);
libusb_release_interface(h, 0);
libusb_close(h);
}
libusb_exit(NULL);
return 0;
}
|
Fix the constants so it will work correctly. Also add the possibility of a different Product ID so the dongles can be set to a mode in which Windows machines with the CSR drivers won't switch them to HCI mode.
|
Fix the constants so it will work correctly. Also add the possibility of a different Product ID so
the dongles can be set to a mode in which Windows machines with the CSR drivers won't switch them to HCI mode.
|
C
|
mit
|
wmertens/textblade-dongler,wmertens/textblade-dongler
|
c
|
## Code Before:
int main (int argc, char ** argv) {
char data[] = { 0x01, 0x05, 0, 0, 0, 0, 0, 0, 0 };
libusb_init(NULL);
libusb_device_handle* h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100b);
if (!h) {
printf("No device in HID mode found\n");
} else {
libusb_detach_kernel_driver(h, 0);
printf("%d\n", libusb_claim_interface(h, 0));
libusb_control_transfer(h, LIBUSB_ENDPOINT_OUT|LIBUSB_REQUEST_TYPE_CLASS|LIBUSB_RECIPIENT_INTERFACE, LIBUSB_REQUEST_GET_CONFIGURATION, 0x0301, 0, data, 9, 10000);
libusb_release_interface(h, 0);
libusb_close(h);
}
libusb_exit(NULL);
return 0;
}
## Instruction:
Fix the constants so it will work correctly. Also add the possibility of a different Product ID so
the dongles can be set to a mode in which Windows machines with the CSR drivers won't switch them to HCI mode.
## Code After:
int main (int argc, char ** argv) {
char data[] = { 0x01, 0x05, 0, 0, 0, 0, 0, 0, 0 };
libusb_init(NULL);
/* using the default pskeys, devices from the factory are a12:100d in HID mode */
libusb_device_handle* h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100b);
if (!h)
/* Alternatively, a12:100c can be set by the dongler to prevent CSR's software
stack from auto-switching to HCI mode */
h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100c);
if (!h) {
printf("No device in HID mode found\n");
} else {
libusb_detach_kernel_driver(h, 0);
printf("This should say 0: %d\n", libusb_claim_interface(h, 0));
libusb_control_transfer(h, LIBUSB_ENDPOINT_OUT|LIBUSB_REQUEST_TYPE_CLASS|LIBUSB_RECIPIENT_INTERFACE, LIBUSB_REQUEST_SET_CONFIGURATION, 0x0301, 0, data, 9, 10000);
libusb_release_interface(h, 0);
libusb_close(h);
}
libusb_exit(NULL);
return 0;
}
|
// ... existing code ...
int main (int argc, char ** argv) {
char data[] = { 0x01, 0x05, 0, 0, 0, 0, 0, 0, 0 };
libusb_init(NULL);
/* using the default pskeys, devices from the factory are a12:100d in HID mode */
libusb_device_handle* h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100b);
if (!h)
/* Alternatively, a12:100c can be set by the dongler to prevent CSR's software
stack from auto-switching to HCI mode */
h = libusb_open_device_with_vid_pid(NULL, 0x0a12, 0x100c);
if (!h) {
printf("No device in HID mode found\n");
} else {
libusb_detach_kernel_driver(h, 0);
printf("This should say 0: %d\n", libusb_claim_interface(h, 0));
libusb_control_transfer(h, LIBUSB_ENDPOINT_OUT|LIBUSB_REQUEST_TYPE_CLASS|LIBUSB_RECIPIENT_INTERFACE, LIBUSB_REQUEST_SET_CONFIGURATION, 0x0301, 0, data, 9, 10000);
libusb_release_interface(h, 0);
libusb_close(h);
}
// ... rest of the code ...
|
954fae8ece0c1f2c36a9f8eace9d060546022b2e
|
filters/tests/config_test.py
|
filters/tests/config_test.py
|
from __future__ import absolute_import
import unittest
from flask import Flask
from .. import config
app = Flask('__config_test')
class GetFuncsTest(unittest.TestCase):
"""All tests for get funcs function."""
def test_get_module_funcs(self):
"""Test the return value."""
self.assertIsInstance(config._get_funcs('__main__'), dict)
class InjectFiltersTest(unittest.TestCase):
"""All tests for inject filters function."""
def test_inject_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config._inject_filters(app, {}), Flask)
def test_inject_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config._inject_filters(app, {'foo': lambda x: x})
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
assert 'foo' in app.jinja_env.filters
class ConfigFlaskFiltersTest(unittest.TestCase):
"""All tests for config flask filters function."""
def test_config_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config.config_flask_filters(app), Flask)
def test_config_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config.config_flask_filters(app)
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
|
"""Test configuration utilities."""
from __future__ import absolute_import
import unittest
from flask import Flask
from .. import config
app = Flask('__config_test')
class GetFuncsTest(unittest.TestCase):
"""All tests for get funcs function."""
def test_get_module_funcs(self):
"""Test the return value."""
self.assertIsInstance(config._get_funcs(config), dict)
def test_get_module_funcs_notempty(self):
"""Test the return value functions length."""
self.assertGreater(len(config._get_funcs(config).items()), 0)
class InjectFiltersTest(unittest.TestCase):
"""All tests for inject filters function."""
def test_inject_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config._inject_filters(app, {}), Flask)
def test_inject_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config._inject_filters(app, {'foo': lambda x: x})
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
assert 'foo' in app.jinja_env.filters
class ConfigFlaskFiltersTest(unittest.TestCase):
"""All tests for config flask filters function."""
def test_config_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config.config_flask_filters(app), Flask)
def test_config_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config.config_flask_filters(app)
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
|
Remove protected class access, add module docstrings.
|
Remove protected class access, add module docstrings.
|
Python
|
mit
|
christabor/flask_extras,christabor/jinja2_template_pack,christabor/jinja2_template_pack,christabor/flask_extras
|
python
|
## Code Before:
from __future__ import absolute_import
import unittest
from flask import Flask
from .. import config
app = Flask('__config_test')
class GetFuncsTest(unittest.TestCase):
"""All tests for get funcs function."""
def test_get_module_funcs(self):
"""Test the return value."""
self.assertIsInstance(config._get_funcs('__main__'), dict)
class InjectFiltersTest(unittest.TestCase):
"""All tests for inject filters function."""
def test_inject_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config._inject_filters(app, {}), Flask)
def test_inject_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config._inject_filters(app, {'foo': lambda x: x})
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
assert 'foo' in app.jinja_env.filters
class ConfigFlaskFiltersTest(unittest.TestCase):
"""All tests for config flask filters function."""
def test_config_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config.config_flask_filters(app), Flask)
def test_config_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config.config_flask_filters(app)
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
## Instruction:
Remove protected class access, add module docstrings.
## Code After:
"""Test configuration utilities."""
from __future__ import absolute_import
import unittest
from flask import Flask
from .. import config
app = Flask('__config_test')
class GetFuncsTest(unittest.TestCase):
"""All tests for get funcs function."""
def test_get_module_funcs(self):
"""Test the return value."""
self.assertIsInstance(config._get_funcs(config), dict)
def test_get_module_funcs_notempty(self):
"""Test the return value functions length."""
self.assertGreater(len(config._get_funcs(config).items()), 0)
class InjectFiltersTest(unittest.TestCase):
"""All tests for inject filters function."""
def test_inject_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config._inject_filters(app, {}), Flask)
def test_inject_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config._inject_filters(app, {'foo': lambda x: x})
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
assert 'foo' in app.jinja_env.filters
class ConfigFlaskFiltersTest(unittest.TestCase):
"""All tests for config flask filters function."""
def test_config_filters_inst(self):
"""Test the return value."""
self.assertIsInstance(config.config_flask_filters(app), Flask)
def test_config_filters_count(self):
"""Test the return value."""
old = len(app.jinja_env.filters)
config.config_flask_filters(app)
new = len(app.jinja_env.filters)
self.assertGreater(new, old)
|
// ... existing code ...
"""Test configuration utilities."""
from __future__ import absolute_import
import unittest
// ... modified code ...
def test_get_module_funcs(self):
"""Test the return value."""
self.assertIsInstance(config._get_funcs(config), dict)
def test_get_module_funcs_notempty(self):
"""Test the return value functions length."""
self.assertGreater(len(config._get_funcs(config).items()), 0)
class InjectFiltersTest(unittest.TestCase):
// ... rest of the code ...
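The count-based assertions above hinge on the fact that registering a filter mutates app.jinja_env.filters in place. The same effect can be observed through Flask's public API alone; the sketch below uses a throwaway 'shout' filter as an assumed example and does not touch the package's private helpers:
from flask import Flask

app = Flask(__name__)
before = len(app.jinja_env.filters)

@app.template_filter('shout')
def shout(s):
    return s.upper() + '!'

assert len(app.jinja_env.filters) == before + 1
assert 'shout' in app.jinja_env.filters
assert app.jinja_env.filters['shout']('hi') == 'HI!'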
|
fdd87814f68810a390c50f7bf2a08359430722fa
|
conda_build/main_index.py
|
conda_build/main_index.py
|
from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
Update command docs for conda index
|
Update command docs for conda index
|
Python
|
bsd-3-clause
|
frol/conda-build,rmcgibbo/conda-build,shastings517/conda-build,mwcraig/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,sandhujasmine/conda-build,frol/conda-build,rmcgibbo/conda-build,ilastik/conda-build,dan-blanchard/conda-build,shastings517/conda-build,frol/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,shastings517/conda-build,sandhujasmine/conda-build,ilastik/conda-build,ilastik/conda-build
|
python
|
## Code Before:
from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
## Instruction:
Update command docs for conda index
## Code After:
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
// ... existing code ...
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
// ... rest of the code ...
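A handy way to sanity-check an argument layout like this without running conda is to hand parse_args a synthetic argv. The parser below only mirrors the flags above for demonstration; it is not the real conda-build entry point:
import os
from argparse import ArgumentParser

p = ArgumentParser(description="Demo of the index flags")
p.add_argument('dir', nargs='*', default=[os.getcwd()])
p.add_argument('-c', '--check-md5', action='store_true')
p.add_argument('-f', '--force', action='store_true')
p.add_argument('-q', '--quiet', action='store_true')

args = p.parse_args(['-f', 'pkgs/linux-64', 'pkgs/noarch'])
assert args.force and not args.quiet
assert args.check_md5 is False   # dashes become underscores on the namespace
assert args.dir == ['pkgs/linux-64', 'pkgs/noarch']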
|
fb3a0db023161fbf5b08147dfac1b56989918bf6
|
tvseries/core/models.py
|
tvseries/core/models.py
|
from tvseries.ext import db
class TVSerie(db.Model):
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
from tvseries.ext import db
class TVSerie(db.Model):
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
Remove sqlite autoincrement parameter from model
|
Remove sqlite autoincrement parameter from model
|
Python
|
mit
|
rafaelhenrique/flask_tutorial,python-sorocaba/flask_tutorial,python-sorocaba/flask_tutorial,rafaelhenrique/flask_tutorial,python-sorocaba/flask_tutorial
|
python
|
## Code Before:
from tvseries.ext import db
class TVSerie(db.Model):
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
## Instruction:
Remove sqlite autoincrement parameter from model
## Code After:
from tvseries.ext import db
class TVSerie(db.Model):
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
name = db.Column(db.String(50), unique=True, nullable=False)
description = db.Column(db.Text, nullable=True)
episodies_number = db.Column(db.Integer, nullable=False, default=1)
author = db.Column(db.String(50), nullable=False)
def __repr__(self):
if self.description:
self.description = "{0}...".format(self.description[0:10])
return ("TVSerie(id={!r}, name={!r}, "
"description={!r}, episodies_number={!r})").format(
self.id, self.name,
self.description,
self.episodies_number)
|
...
class TVSerie(db.Model):
id = db.Column(db.Integer(),
nullable=False, unique=True,
autoincrement=True, primary_key=True)
...
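Dropping sqlite_autoincrement means SQLite assigns plain rowids (max rowid + 1 in practice, with possible reuse after deletes) instead of the strictly monotonic AUTOINCREMENT counter, which is normally enough for a primary key. For reference, a plain-SQLAlchemy sketch of where that option would live if it ever had to come back; the model name and the SQLAlchemy 1.4-style import are assumptions for illustration:
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class TVSerieSketch(Base):
    __tablename__ = 'tv_serie_sketch'
    # Re-enable strict AUTOINCREMENT only if rowid reuse ever becomes a problem:
    # __table_args__ = {'sqlite_autoincrement': True}
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(50), unique=True, nullable=False)

Base.metadata.create_all(create_engine('sqlite://'))   # in-memory smoke test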
|
2bab1888b43a9c232b37cc26c37df992ea5df2c5
|
project/apps/api/signals.py
|
project/apps/api/signals.py
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
|
Create sentinel rounds on Session creation
|
Create sentinel rounds on Session creation
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api
|
python
|
## Code Before:
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
## Instruction:
Create sentinel rounds on Session creation
## Code After:
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from .models import (
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
@receiver(post_save, sender=Performance)
def performance_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
s = 1
while s <= instance.round.num_songs:
song = instance.songs.create(
performance=instance,
num=s,
)
s += 1
judges = instance.round.session.judges.filter(
category__in=[
instance.round.session.judges.model.CATEGORY.music,
instance.round.session.judges.model.CATEGORY.presentation,
instance.round.session.judges.model.CATEGORY.singing,
]
)
for judge in judges:
judge.scores.create(
judge=judge,
song=song,
category=judge.category,
kind=judge.kind,
)
|
# ... existing code ...
Performance,
Session,
)
@receiver(post_save, sender=Session)
def session_post_save(sender, instance=None, created=False, raw=False, **kwargs):
"""Create sentinels."""
if not raw:
if created:
i = 1
while i <= instance.num_rounds:
instance.rounds.create(
num=i,
kind=(instance.num_rounds - i) + 1,
)
i += 1
@receiver(post_save, sender=Performance)
# ... modified code ...
category=judge.category,
kind=judge.kind,
)
# ... rest of the code ...
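The only non-obvious arithmetic above is kind=(instance.num_rounds - i) + 1: num counts the rounds forward while kind counts down, so the last round created gets kind 1. A three-round check of that mapping, detached from Django:
num_rounds = 3
pairs = [(i, (num_rounds - i) + 1) for i in range(1, num_rounds + 1)]
assert pairs == [(1, 3), (2, 2), (3, 1)]   # (num, kind): kind reaches 1 on the final round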
|
e4edb986c6acfb48e0d95b845bcdca75595f5308
|
pevents.h
|
pevents.h
|
namespace neosmart
{
struct neosmart_event_t_;
typedef neosmart_event_t_ * neosmart_event_t;
neosmart_event_t CreateEvent(bool manualReset = false, bool initialState = false);
int DestroyEvent(neosmart_event_t event);
int WaitForEvent(neosmart_event_t event, uint32_t milliseconds = -1);
int SetEvent(neosmart_event_t event);
int ResetEvent(neosmart_event_t event);
}
|
namespace neosmart
{
//Type declarations
struct neosmart_event_t_;
typedef neosmart_event_t_ * neosmart_event_t;
//WIN32-style pevent functions
neosmart_event_t CreateEvent(bool manualReset = false, bool initialState = false);
int DestroyEvent(neosmart_event_t event);
int WaitForEvent(neosmart_event_t event, uint32_t milliseconds = -1);
int SetEvent(neosmart_event_t event);
int ResetEvent(neosmart_event_t event);
//posix-style functions
//TBD
}
|
Add posix-styled functions for using neosmart_event_t objects
|
TBD: Add posix-styled functions for using neosmart_event_t objects
|
C
|
mit
|
neosmart/pevents,neosmart/pevents
|
c
|
## Code Before:
namespace neosmart
{
struct neosmart_event_t_;
typedef neosmart_event_t_ * neosmart_event_t;
neosmart_event_t CreateEvent(bool manualReset = false, bool initialState = false);
int DestroyEvent(neosmart_event_t event);
int WaitForEvent(neosmart_event_t event, uint32_t milliseconds = -1);
int SetEvent(neosmart_event_t event);
int ResetEvent(neosmart_event_t event);
}
## Instruction:
TBD: Add posix-styled functions for using neosmart_event_t objects
## Code After:
namespace neosmart
{
//Type declarations
struct neosmart_event_t_;
typedef neosmart_event_t_ * neosmart_event_t;
//WIN32-style pevent functions
neosmart_event_t CreateEvent(bool manualReset = false, bool initialState = false);
int DestroyEvent(neosmart_event_t event);
int WaitForEvent(neosmart_event_t event, uint32_t milliseconds = -1);
int SetEvent(neosmart_event_t event);
int ResetEvent(neosmart_event_t event);
//posix-style functions
//TBD
}
|
...
namespace neosmart
{
//Type declarations
struct neosmart_event_t_;
typedef neosmart_event_t_ * neosmart_event_t;
//WIN32-style pevent functions
neosmart_event_t CreateEvent(bool manualReset = false, bool initialState = false);
int DestroyEvent(neosmart_event_t event);
int WaitForEvent(neosmart_event_t event, uint32_t milliseconds = -1);
int SetEvent(neosmart_event_t event);
int ResetEvent(neosmart_event_t event);
//posix-style functions
//TBD
}
...
|
2d13b639f17fd7430191c45ee14f6d200228fd5a
|
geoportal/geoportailv3_geoportal/views/luxthemes.py
|
geoportal/geoportailv3_geoportal/views/luxthemes.py
|
from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
|
import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
Fix themes.json with internal WMS
|
Fix themes.json with internal WMS
|
Python
|
mit
|
Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3
|
python
|
## Code Before:
from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
## Instruction:
Fix themes.json with internal WMS
## Code After:
import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
// ... existing code ...
import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
// ... rest of the code ...
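CACHE_REGION.cache_on_arguments() is dogpile.cache memoisation (c2cgeoportal's get_region returns a configured dogpile region). A standalone sketch of the same pattern with a plain in-memory backend, assuming only that dogpile.cache is installed; the layer name is made up:
from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory')
calls = []

@region.cache_on_arguments()
def wms_layers_internal(name):
    calls.append(name)   # side effect to show whether the body ran
    return {'layers': {name: {'children': [], 'info': []}}}

wms_layers_internal('ortho')
wms_layers_internal('ortho')   # second call is served from the cache
assert calls == ['ortho']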
|
49c73b00b5528706fbb340e53b37e59c8303d70d
|
oneflow/settings/snippets/common_production.py
|
oneflow/settings/snippets/common_production.py
|
ALLOWED_HOSTS += [
'1flow.io',
'app.1flow.io',
'api.1flow.io',
]
|
MANAGERS += (('Matthieu Chaignot', '[email protected]'), )
ALLOWED_HOSTS += [
'1flow.io',
'app.1flow.io',
'api.1flow.io',
]
|
Add Matthieu to MANAGERS so that he receives the warn-closed-feed mail.
|
Add Matthieu to MANAGERS so that he receives the warn-closed-feed mail.
|
Python
|
agpl-3.0
|
1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow
|
python
|
## Code Before:
ALLOWED_HOSTS += [
'1flow.io',
'app.1flow.io',
'api.1flow.io',
]
## Instruction:
Add Matthieu to MANAGERS so that he receives the warn-closed-feed mail.
## Code After:
MANAGERS += (('Matthieu Chaignot', '[email protected]'), )
ALLOWED_HOSTS += [
'1flow.io',
'app.1flow.io',
'api.1flow.io',
]
|
...
MANAGERS += (('Matthieu Chaignot', '[email protected]'), )
ALLOWED_HOSTS += [
'1flow.io',
...
|
35366b0c2622bfbf354fbbcb1bc2dc7ca191dc3e
|
base/src/main/java/uk/ac/ebi/atlas/profiles/baseline/ExpressionsRowRawDeserializerBaseline.java
|
base/src/main/java/uk/ac/ebi/atlas/profiles/baseline/ExpressionsRowRawDeserializerBaseline.java
|
package uk.ac.ebi.atlas.profiles.baseline;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import uk.ac.ebi.atlas.model.experiment.baseline.BaselineExpression;
import uk.ac.ebi.atlas.model.experiment.baseline.FactorGroup;
import uk.ac.ebi.atlas.profiles.ExpressionsRowRawDeserializer;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
public class ExpressionsRowRawDeserializerBaseline extends ExpressionsRowRawDeserializer<BaselineExpression> {
final int expectedNumberOfValues;
Iterator<FactorGroup> factorGroups;
public ExpressionsRowRawDeserializerBaseline(List<FactorGroup> orderedFactorGroups) {
expectedNumberOfValues = orderedFactorGroups.size();
factorGroups = Iterables.cycle(orderedFactorGroups).iterator();
}
@Override
public ExpressionsRowRawDeserializer<BaselineExpression> reload(BaselineExpression... values) {
if (values.length != expectedNumberOfValues) {
throw new IllegalArgumentException(String.format("Expected %s values but got [%s]", expectedNumberOfValues, Joiner.on(",").join(values)));
}
return super.reload(values);
}
@Override
public BaselineExpression nextExpression(Queue<BaselineExpression> rawValuesRow) {
return rawValuesRow.poll();
}
}
|
package uk.ac.ebi.atlas.profiles.baseline;
import com.google.common.base.Joiner;
import uk.ac.ebi.atlas.model.experiment.baseline.BaselineExpression;
import uk.ac.ebi.atlas.model.experiment.baseline.FactorGroup;
import uk.ac.ebi.atlas.profiles.ExpressionsRowRawDeserializer;
import java.util.List;
import java.util.Queue;
public class ExpressionsRowRawDeserializerBaseline extends ExpressionsRowRawDeserializer<BaselineExpression> {
private int expectedNumberOfValues;
public ExpressionsRowRawDeserializerBaseline(List<FactorGroup> orderedFactorGroups) {
expectedNumberOfValues = orderedFactorGroups.size();
}
@Override
public ExpressionsRowRawDeserializer<BaselineExpression> reload(BaselineExpression... values) {
if (values.length != expectedNumberOfValues) {
throw new IllegalArgumentException(String.format("Expected %s values but got [%s]", expectedNumberOfValues, Joiner.on(",").join(values)));
}
return super.reload(values);
}
@Override
public BaselineExpression nextExpression(Queue<BaselineExpression> rawValuesRow) {
return rawValuesRow.poll();
}
}
|
Remove field which was never accessed
|
Remove field which was never accessed
|
Java
|
apache-2.0
|
gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas,gxa/atlas
|
java
|
## Code Before:
package uk.ac.ebi.atlas.profiles.baseline;
import com.google.common.base.Joiner;
import com.google.common.collect.Iterables;
import uk.ac.ebi.atlas.model.experiment.baseline.BaselineExpression;
import uk.ac.ebi.atlas.model.experiment.baseline.FactorGroup;
import uk.ac.ebi.atlas.profiles.ExpressionsRowRawDeserializer;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
public class ExpressionsRowRawDeserializerBaseline extends ExpressionsRowRawDeserializer<BaselineExpression> {
final int expectedNumberOfValues;
Iterator<FactorGroup> factorGroups;
public ExpressionsRowRawDeserializerBaseline(List<FactorGroup> orderedFactorGroups) {
expectedNumberOfValues = orderedFactorGroups.size();
factorGroups = Iterables.cycle(orderedFactorGroups).iterator();
}
@Override
public ExpressionsRowRawDeserializer<BaselineExpression> reload(BaselineExpression... values) {
if (values.length != expectedNumberOfValues) {
throw new IllegalArgumentException(String.format("Expected %s values but got [%s]", expectedNumberOfValues, Joiner.on(",").join(values)));
}
return super.reload(values);
}
@Override
public BaselineExpression nextExpression(Queue<BaselineExpression> rawValuesRow) {
return rawValuesRow.poll();
}
}
## Instruction:
Remove field which was never accessed
## Code After:
package uk.ac.ebi.atlas.profiles.baseline;
import com.google.common.base.Joiner;
import uk.ac.ebi.atlas.model.experiment.baseline.BaselineExpression;
import uk.ac.ebi.atlas.model.experiment.baseline.FactorGroup;
import uk.ac.ebi.atlas.profiles.ExpressionsRowRawDeserializer;
import java.util.List;
import java.util.Queue;
public class ExpressionsRowRawDeserializerBaseline extends ExpressionsRowRawDeserializer<BaselineExpression> {
private int expectedNumberOfValues;
public ExpressionsRowRawDeserializerBaseline(List<FactorGroup> orderedFactorGroups) {
expectedNumberOfValues = orderedFactorGroups.size();
}
@Override
public ExpressionsRowRawDeserializer<BaselineExpression> reload(BaselineExpression... values) {
if (values.length != expectedNumberOfValues) {
throw new IllegalArgumentException(String.format("Expected %s values but got [%s]", expectedNumberOfValues, Joiner.on(",").join(values)));
}
return super.reload(values);
}
@Override
public BaselineExpression nextExpression(Queue<BaselineExpression> rawValuesRow) {
return rawValuesRow.poll();
}
}
|
...
package uk.ac.ebi.atlas.profiles.baseline;
import com.google.common.base.Joiner;
import uk.ac.ebi.atlas.model.experiment.baseline.BaselineExpression;
import uk.ac.ebi.atlas.model.experiment.baseline.FactorGroup;
import uk.ac.ebi.atlas.profiles.ExpressionsRowRawDeserializer;
import java.util.List;
import java.util.Queue;
public class ExpressionsRowRawDeserializerBaseline extends ExpressionsRowRawDeserializer<BaselineExpression> {
private int expectedNumberOfValues;
public ExpressionsRowRawDeserializerBaseline(List<FactorGroup> orderedFactorGroups) {
expectedNumberOfValues = orderedFactorGroups.size();
}
@Override
...
|
b0a94dc2f696464db999e652b4a9dbdaf96f8532
|
backend/talks/forms.py
|
backend/talks/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data['conference']
if not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
|
Mark conference and language as required
|
Mark conference and language as required
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
python
|
## Code Before:
from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data['conference']
if not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
## Instruction:
Mark conference and language as required
## Code After:
from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
|
...
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
...
|
02ed846cb365e0717a888da0c56065fd54a03a7f
|
setup.py
|
setup.py
|
import setuptools
import shakyo
import sys
if not ((sys.version_info.major >= 3 and sys.version_info.minor >= 5)
or sys.version_info.major > 3):
exit("Sorry, Python's version must be later than 3.5.")
setuptools.setup(
name=shakyo.__name__,
version=shakyo.__version__,
description="a tool to learn about something just by copying it by hand",
license="Public Domain",
author="raviqqe",
author_email="[email protected]",
url="http://github.com/raviqqe/shakyo/",
py_modules=[shakyo.__name__],
install_requires=["text_unidecode", "validators"],
classifiers=[
"Development Status :: Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: Public Domain",
"Operating System :: POSIX",
"Topic :: Education :: Computer Aided Instruction (CAI)",
"Topic :: Games/Entertainment",
],
)
|
import setuptools
import sys
if not ((sys.version_info.major >= 3 and sys.version_info.minor >= 5)
or sys.version_info.major > 3):
exit("Sorry, Python's version must be later than 3.5.")
import shakyo
setuptools.setup(
name=shakyo.__name__,
version=shakyo.__version__,
description="a tool to learn about something just by copying it by hand",
license="Public Domain",
author="raviqqe",
author_email="[email protected]",
url="http://github.com/raviqqe/shakyo/",
py_modules=[shakyo.__name__],
install_requires=["text_unidecode", "validators"],
classifiers=[
"Development Status :: Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: Public Domain",
"Operating System :: POSIX",
"Topic :: Education :: Computer Aided Instruction (CAI)",
"Topic :: Games/Entertainment",
],
)
|
Fix point of importing module
|
Fix point of importing module
|
Python
|
unlicense
|
raviqqe/shakyo
|
python
|
## Code Before:
import setuptools
import shakyo
import sys
if not ((sys.version_info.major >= 3 and sys.version_info.minor >= 5)
or sys.version_info.major > 3):
exit("Sorry, Python's version must be later than 3.5.")
setuptools.setup(
name=shakyo.__name__,
version=shakyo.__version__,
description="a tool to learn about something just by copying it by hand",
license="Public Domain",
author="raviqqe",
author_email="[email protected]",
url="http://github.com/raviqqe/shakyo/",
py_modules=[shakyo.__name__],
install_requires=["text_unidecode", "validators"],
classifiers=[
"Development Status :: Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: Public Domain",
"Operating System :: POSIX",
"Topic :: Education :: Computer Aided Instruction (CAI)",
"Topic :: Games/Entertainment",
],
)
## Instruction:
Fix point of importing module
## Code After:
import setuptools
import sys
if not ((sys.version_info.major >= 3 and sys.version_info.minor >= 5)
or sys.version_info.major > 3):
exit("Sorry, Python's version must be later than 3.5.")
import shakyo
setuptools.setup(
name=shakyo.__name__,
version=shakyo.__version__,
description="a tool to learn about something just by copying it by hand",
license="Public Domain",
author="raviqqe",
author_email="[email protected]",
url="http://github.com/raviqqe/shakyo/",
py_modules=[shakyo.__name__],
install_requires=["text_unidecode", "validators"],
classifiers=[
"Development Status :: Alpha",
"Environment :: Console :: Curses",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: Public Domain",
"Operating System :: POSIX",
"Topic :: Education :: Computer Aided Instruction (CAI)",
"Topic :: Games/Entertainment",
],
)
|
# ... existing code ...
import setuptools
import sys
if not ((sys.version_info.major >= 3 and sys.version_info.minor >= 5)
or sys.version_info.major > 3):
exit("Sorry, Python's version must be later than 3.5.")
import shakyo
setuptools.setup(
# ... rest of the code ...
|
bbaf4584286657582a92d5bb4038a5a06654ebb1
|
fetch-pack.h
|
fetch-pack.h
|
struct fetch_pack_args
{
const char *uploadpack;
int quiet;
int keep_pack;
int unpacklimit;
int use_thin_pack;
int fetch_all;
int verbose;
int depth;
int no_progress;
};
void setup_fetch_pack(struct fetch_pack_args *args);
struct ref *fetch_pack(const char *dest, int nr_heads, char **heads, char **pack_lockfile);
#endif
|
struct fetch_pack_args
{
const char *uploadpack;
int unpacklimit;
int depth;
unsigned quiet:1,
keep_pack:1,
use_thin_pack:1,
fetch_all:1,
verbose:1,
no_progress:1;
};
void setup_fetch_pack(struct fetch_pack_args *args);
struct ref *fetch_pack(const char *dest, int nr_heads, char **heads, char **pack_lockfile);
#endif
|
Use 'unsigned:1' when we mean boolean options
|
Use 'unsigned:1' when we mean boolean options
These options are all strictly boolean (true/false). It's easier to
document this implicitly by making their storage type a single bit.
There is no compelling memory space reduction reason for this change,
it just makes the structure definition slightly more readable.
Signed-off-by: Shawn O. Pearce <[email protected]>
|
C
|
mit
|
destenson/git,destenson/git,destenson/git,destenson/git,destenson/git,destenson/git,destenson/git,destenson/git
|
c
|
## Code Before:
struct fetch_pack_args
{
const char *uploadpack;
int quiet;
int keep_pack;
int unpacklimit;
int use_thin_pack;
int fetch_all;
int verbose;
int depth;
int no_progress;
};
void setup_fetch_pack(struct fetch_pack_args *args);
struct ref *fetch_pack(const char *dest, int nr_heads, char **heads, char **pack_lockfile);
#endif
## Instruction:
Use 'unsigned:1' when we mean boolean options
These options are all strictly boolean (true/false). It's easier to
document this implicitly by making their storage type a single bit.
There is no compelling memory space reduction reason for this change,
it just makes the structure definition slightly more readable.
Signed-off-by: Shawn O. Pearce <[email protected]>
## Code After:
struct fetch_pack_args
{
const char *uploadpack;
int unpacklimit;
int depth;
unsigned quiet:1,
keep_pack:1,
use_thin_pack:1,
fetch_all:1,
verbose:1,
no_progress:1;
};
void setup_fetch_pack(struct fetch_pack_args *args);
struct ref *fetch_pack(const char *dest, int nr_heads, char **heads, char **pack_lockfile);
#endif
|
// ... existing code ...
struct fetch_pack_args
{
const char *uploadpack;
int unpacklimit;
int depth;
unsigned quiet:1,
keep_pack:1,
use_thin_pack:1,
fetch_all:1,
verbose:1,
no_progress:1;
};
void setup_fetch_pack(struct fetch_pack_args *args);
// ... rest of the code ...
|
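[Editor's aside, not part of the record above or of the original git sources: the commit message argues that strictly true/false options are clearer as one-bit fields. Below is a minimal, self-contained C sketch of that `unsigned:1` idiom; the struct and field names are invented for illustration only.]
#include <stdio.h>
/* Boolean options packed as one-bit fields; numeric options stay plain ints. */
struct options {
	int depth;              /* genuinely numeric value, kept as an int */
	unsigned quiet:1,       /* strictly true/false flags: one bit each */
	         verbose:1,
	         keep_pack:1;
};
int main(void)
{
	struct options opts = { .depth = 3, .verbose = 1 };
	opts.quiet = 1;         /* a one-bit field can only ever hold 0 or 1 */
	printf("depth=%d quiet=%d verbose=%d keep_pack=%d\n",
	       opts.depth, opts.quiet, opts.verbose, opts.keep_pack);
	return 0;
}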
0ed9e159fa606c9dbdb90dfc64fcb357e9f9cedb
|
plenum/test/test_request.py
|
plenum/test/test_request.py
|
from indy_common.types import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
req = Request()
assert req.all_identifiers == []
|
from plenum.common.request import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
req = Request()
assert req.all_identifiers == []
|
Fix wrong import in test
|
Fix wrong import in test
Signed-off-by: Sergey Khoroshavin <[email protected]>
|
Python
|
apache-2.0
|
evernym/zeno,evernym/plenum
|
python
|
## Code Before:
from indy_common.types import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
req = Request()
assert req.all_identifiers == []
## Instruction:
Fix wrong import in test
Signed-off-by: Sergey Khoroshavin <[email protected]>
## Code After:
from plenum.common.request import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
req = Request()
assert req.all_identifiers == []
|
...
from plenum.common.request import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
...
|
94c2a97bf926e01ae68730ffdb92a9d1439b3dcc
|
enabler/src/com/openxc/enabler/PipelineStatusUpdateTask.java
|
enabler/src/com/openxc/enabler/PipelineStatusUpdateTask.java
|
package com.openxc.enabler;
import java.util.TimerTask;
import android.app.Activity;
import com.openxc.VehicleManager;
import android.widget.ListView;
import android.widget.ArrayAdapter;
public class PipelineStatusUpdateTask extends TimerTask {
private VehicleManager mVehicleManager;
private Activity mActivity;
private ListView mSourceListView;
private ListView mSinkListView;
public PipelineStatusUpdateTask(VehicleManager vehicleService,
Activity activity, ListView sourceListView, ListView sinkListView) {
mVehicleManager = vehicleService;
mActivity = activity;
mSourceListView = sourceListView;
mSinkListView = sinkListView;
}
public void run() {
mActivity.runOnUiThread(new Runnable() {
public void run() {
mSourceListView.setAdapter(new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1,
mVehicleManager.getSourceSummaries().toArray()));
mSinkListView.setAdapter(new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1,
mVehicleManager.getSinkSummaries().toArray()));
}
});
}
}
|
package com.openxc.enabler;
import java.util.TimerTask;
import android.app.Activity;
import com.openxc.VehicleManager;
import android.widget.ListView;
import android.widget.ArrayAdapter;
public class PipelineStatusUpdateTask extends TimerTask {
private VehicleManager mVehicleManager;
private Activity mActivity;
private ListView mSourceListView;
private ListView mSinkListView;
private ArrayAdapter<Object> mSourceListAdapter;
private ArrayAdapter<Object> mSinkListAdapter;
public PipelineStatusUpdateTask(VehicleManager vehicleService,
Activity activity, ListView sourceListView, ListView sinkListView) {
mVehicleManager = vehicleService;
mActivity = activity;
mSourceListView = sourceListView;
mSinkListView = sinkListView;
mSourceListAdapter = new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1);
mSourceListView.setAdapter(mSourceListAdapter);
mSinkListAdapter = new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1);
mSinkListView.setAdapter(mSinkListAdapter);
}
public void run() {
mActivity.runOnUiThread(new Runnable() {
public void run() {
mSourceListAdapter.clear();
mSourceListAdapter.addAll(
mVehicleManager.getSourceSummaries().toArray());
mSourceListAdapter.notifyDataSetChanged();
mSinkListAdapter.clear();
mSinkListAdapter.addAll(
mVehicleManager.getSinkSummaries().toArray());
mSinkListAdapter.notifyDataSetChanged();
}
});
}
}
|
Maintain scroll position in data source/sink list in Enabler.
|
Maintain scroll position in data source/sink list in Enabler.
Instead of blowing away the adapter each time we update, keep the same adapter
and just update the list.
Thanks to
http://vikinghammer.com/2011/06/17/android-listview-maintain-your-scroll-position-when-you-refresh/
|
Java
|
bsd-3-clause
|
prateeknitish391/demo,mray19027/openxc-android,prateeknitish391/demo,msowka/openxc-android,dhootha/openxc-android,ChernyshovYuriy/openxc-android,openxc/openxc-android,openxc/openxc-android,msowka/openxc-android,mray19027/openxc-android,ChernyshovYuriy/openxc-android,petemaclellan/openxc-android,prateeknitish391/demo,petemaclellan/openxc-android,dhootha/openxc-android
|
java
|
## Code Before:
package com.openxc.enabler;
import java.util.TimerTask;
import android.app.Activity;
import com.openxc.VehicleManager;
import android.widget.ListView;
import android.widget.ArrayAdapter;
public class PipelineStatusUpdateTask extends TimerTask {
private VehicleManager mVehicleManager;
private Activity mActivity;
private ListView mSourceListView;
private ListView mSinkListView;
public PipelineStatusUpdateTask(VehicleManager vehicleService,
Activity activity, ListView sourceListView, ListView sinkListView) {
mVehicleManager = vehicleService;
mActivity = activity;
mSourceListView = sourceListView;
mSinkListView = sinkListView;
}
public void run() {
mActivity.runOnUiThread(new Runnable() {
public void run() {
mSourceListView.setAdapter(new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1,
mVehicleManager.getSourceSummaries().toArray()));
mSinkListView.setAdapter(new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1,
mVehicleManager.getSinkSummaries().toArray()));
}
});
}
}
## Instruction:
Maintain scroll position in data source/sink list in Enabler.
Instead of blowing away the adapter each time we update, keep the same adapter
and just update the list.
Thanks to
http://vikinghammer.com/2011/06/17/android-listview-maintain-your-scroll-position-when-you-refresh/
## Code After:
package com.openxc.enabler;
import java.util.TimerTask;
import android.app.Activity;
import com.openxc.VehicleManager;
import android.widget.ListView;
import android.widget.ArrayAdapter;
public class PipelineStatusUpdateTask extends TimerTask {
private VehicleManager mVehicleManager;
private Activity mActivity;
private ListView mSourceListView;
private ListView mSinkListView;
private ArrayAdapter<Object> mSourceListAdapter;
private ArrayAdapter<Object> mSinkListAdapter;
public PipelineStatusUpdateTask(VehicleManager vehicleService,
Activity activity, ListView sourceListView, ListView sinkListView) {
mVehicleManager = vehicleService;
mActivity = activity;
mSourceListView = sourceListView;
mSinkListView = sinkListView;
mSourceListAdapter = new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1);
mSourceListView.setAdapter(mSourceListAdapter);
mSinkListAdapter = new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1);
mSinkListView.setAdapter(mSinkListAdapter);
}
public void run() {
mActivity.runOnUiThread(new Runnable() {
public void run() {
mSourceListAdapter.clear();
mSourceListAdapter.addAll(
mVehicleManager.getSourceSummaries().toArray());
mSourceListAdapter.notifyDataSetChanged();
mSinkListAdapter.clear();
mSinkListAdapter.addAll(
mVehicleManager.getSinkSummaries().toArray());
mSinkListAdapter.notifyDataSetChanged();
}
});
}
}
|
...
private Activity mActivity;
private ListView mSourceListView;
private ListView mSinkListView;
private ArrayAdapter<Object> mSourceListAdapter;
private ArrayAdapter<Object> mSinkListAdapter;
public PipelineStatusUpdateTask(VehicleManager vehicleService,
Activity activity, ListView sourceListView, ListView sinkListView) {
...
mActivity = activity;
mSourceListView = sourceListView;
mSinkListView = sinkListView;
mSourceListAdapter = new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1);
mSourceListView.setAdapter(mSourceListAdapter);
mSinkListAdapter = new ArrayAdapter<Object>(mActivity,
android.R.layout.simple_list_item_1);
mSinkListView.setAdapter(mSinkListAdapter);
}
public void run() {
mActivity.runOnUiThread(new Runnable() {
public void run() {
mSourceListAdapter.clear();
mSourceListAdapter.addAll(
mVehicleManager.getSourceSummaries().toArray());
mSourceListAdapter.notifyDataSetChanged();
mSinkListAdapter.clear();
mSinkListAdapter.addAll(
mVehicleManager.getSinkSummaries().toArray());
mSinkListAdapter.notifyDataSetChanged();
}
});
}
...
|
ff4477c870b9c618b7432047071792c3a8055eb7
|
coffeeraspi/messages.py
|
coffeeraspi/messages.py
|
class DrinkOrder():
def __init__(self, mug_size, add_ins, name=None):
self.mug_size = mug_size
self.add_ins = add_ins
self.name = name
@classmethod
def deserialize(cls, data):
return DrinkOrder(data['mug_size'],
data['add_ins'],
data.get('name', None))
|
class DrinkOrder():
def __init__(self, mug_size, add_ins, name=None):
self.mug_size = mug_size
self.add_ins = add_ins
self.name = name
@classmethod
def deserialize(cls, data):
return DrinkOrder(data['mug_size'],
data['add_ins'],
data.get('name', None))
def __str__(self):
return 'DrinkOrder("{}")'.format(self.name if self.name else '')
|
Add nicer drink order logging
|
Add nicer drink order logging
|
Python
|
apache-2.0
|
umbc-hackafe/htcpcp,umbc-hackafe/htcpcp,umbc-hackafe/htcpcp,umbc-hackafe/htcpcp
|
python
|
## Code Before:
class DrinkOrder():
def __init__(self, mug_size, add_ins, name=None):
self.mug_size = mug_size
self.add_ins = add_ins
self.name = name
@classmethod
def deserialize(cls, data):
return DrinkOrder(data['mug_size'],
data['add_ins'],
data.get('name', None))
## Instruction:
Add nicer drink order logging
## Code After:
class DrinkOrder():
def __init__(self, mug_size, add_ins, name=None):
self.mug_size = mug_size
self.add_ins = add_ins
self.name = name
@classmethod
def deserialize(cls, data):
return DrinkOrder(data['mug_size'],
data['add_ins'],
data.get('name', None))
def __str__(self):
return 'DrinkOrder("{}")'.format(self.name if self.name else '')
|
# ... existing code ...
return DrinkOrder(data['mug_size'],
data['add_ins'],
data.get('name', None))
def __str__(self):
return 'DrinkOrder("{}")'.format(self.name if self.name else '')
# ... rest of the code ...
|
1e775fbc8e11f44b8a680e17ac35e735e52d5739
|
fabfile.py
|
fabfile.py
|
from fabric.api import run, env
from fabric.context_managers import cd
import os
env.hosts = ['[email protected]:1337']
def update_podcasts():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py updatepodcasts')
def setup_dev():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py syncdb')
run('python3 manage.py loaddata sample_podcasts')
run('python3 manage.py updatepodcasts')
run('python3 manage.py fetchepisodes')
run('python3 manage.py update_index')
def rebuild_index():
with cd('"{}"'.format(os.path.dirname(__file__))):
# Add --noinput flag because of this issue:
# https://github.com/toastdriven/django-haystack/issues/902
run('python3 manage.py rebuild_index --noinput')
|
from fabric.api import run, env
from fabric.context_managers import cd
import os
env.hosts = ['[email protected]:1337']
def update_podcasts():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py updatepodcasts')
def fetch_episodes():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py fetchepisodes')
def setup_dev():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py syncdb')
run('python3 manage.py loaddata sample_podcasts')
run('python3 manage.py updatepodcasts')
run('python3 manage.py fetchepisodes')
run('python3 manage.py update_index')
def rebuild_index():
with cd('"{}"'.format(os.path.dirname(__file__))):
# Add --noinput flag because of this issue:
# https://github.com/toastdriven/django-haystack/issues/902
run('python3 manage.py rebuild_index --noinput')
|
Add fab command for fetching episodes
|
Add fab command for fetching episodes
|
Python
|
mit
|
matachi/sputnik,matachi/sputnik,matachi/sputnik,matachi/sputnik
|
python
|
## Code Before:
from fabric.api import run, env
from fabric.context_managers import cd
import os
env.hosts = ['[email protected]:1337']
def update_podcasts():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py updatepodcasts')
def setup_dev():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py syncdb')
run('python3 manage.py loaddata sample_podcasts')
run('python3 manage.py updatepodcasts')
run('python3 manage.py fetchepisodes')
run('python3 manage.py update_index')
def rebuild_index():
with cd('"{}"'.format(os.path.dirname(__file__))):
# Add --noinput flag because of this issue:
# https://github.com/toastdriven/django-haystack/issues/902
run('python3 manage.py rebuild_index --noinput')
## Instruction:
Add fab command for fetching episodes
## Code After:
from fabric.api import run, env
from fabric.context_managers import cd
import os
env.hosts = ['[email protected]:1337']
def update_podcasts():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py updatepodcasts')
def fetch_episodes():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py fetchepisodes')
def setup_dev():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py syncdb')
run('python3 manage.py loaddata sample_podcasts')
run('python3 manage.py updatepodcasts')
run('python3 manage.py fetchepisodes')
run('python3 manage.py update_index')
def rebuild_index():
with cd('"{}"'.format(os.path.dirname(__file__))):
# Add --noinput flag because of this issue:
# https://github.com/toastdriven/django-haystack/issues/902
run('python3 manage.py rebuild_index --noinput')
|
# ... existing code ...
def update_podcasts():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py updatepodcasts')
def fetch_episodes():
with cd('"{}"'.format(os.path.dirname(__file__))):
run('python3 manage.py fetchepisodes')
def setup_dev():
# ... rest of the code ...
|
2e361627ca94b3a3b1cdd9583d22ca8ff81a0591
|
rpn/util.py
|
rpn/util.py
|
from functools import wraps
import subprocess
_SELECTIONS = {
'+': 'clipboard',
'*': 'primary',
}
def _store_selection(data, selection):
with subprocess.Popen(['xclip',
'-selection', selection],
stdin=subprocess.PIPE) as xclip:
xclip.stdin.write(str(data).encode())
def _load_selection(selection):
with subprocess.Popen(['xclip',
'-selection', selection,
'-o'], stdout=PIPE) as xclip:
return xclip.stdout.read().decode()
class RPNError(Exception):
pass
def wrap_user_errors(fmt):
'''
Ugly hack decorator that converts exceptions to warnings.
Passes through RPNErrors.
'''
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except RPNError:
raise
except Exception as e:
raise RPNError(fmt.format(*args, **kwargs), e)
return wrapper
return decorator
|
from functools import wraps
import subprocess
_SELECTIONS = {
'+': 'clipboard',
'*': 'primary',
}
def _store_selection(data, selection):
with subprocess.Popen(['xclip',
'-selection', selection],
stdin=subprocess.PIPE) as xclip:
xclip.stdin.write(str(data).encode())
def _load_selection(selection):
with subprocess.Popen(['xclip',
'-selection', selection,
'-o'], stdout=subprocess.PIPE) as xclip:
return xclip.stdout.read().decode()
class RPNError(Exception):
pass
def wrap_user_errors(fmt):
'''
Ugly hack decorator that converts exceptions to warnings.
Passes through RPNErrors.
'''
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except RPNError:
raise
except Exception as e:
raise RPNError(fmt.format(*args, **kwargs), e)
return wrapper
return decorator
|
Fix typo resulting in NameError
|
Fix typo resulting in NameError
|
Python
|
isc
|
pilona/RPN,pilona/RPN
|
python
|
## Code Before:
from functools import wraps
import subprocess
_SELECTIONS = {
'+': 'clipboard',
'*': 'primary',
}
def _store_selection(data, selection):
with subprocess.Popen(['xclip',
'-selection', selection],
stdin=subprocess.PIPE) as xclip:
xclip.stdin.write(str(data).encode())
def _load_selection(selection):
with subprocess.Popen(['xclip',
'-selection', selection,
'-o'], stdout=PIPE) as xclip:
return xclip.stdout.read().decode()
class RPNError(Exception):
pass
def wrap_user_errors(fmt):
'''
Ugly hack decorator that converts exceptions to warnings.
Passes through RPNErrors.
'''
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except RPNError:
raise
except Exception as e:
raise RPNError(fmt.format(*args, **kwargs), e)
return wrapper
return decorator
## Instruction:
Fix typo resulting in NameError
## Code After:
from functools import wraps
import subprocess
_SELECTIONS = {
'+': 'clipboard',
'*': 'primary',
}
def _store_selection(data, selection):
with subprocess.Popen(['xclip',
'-selection', selection],
stdin=subprocess.PIPE) as xclip:
xclip.stdin.write(str(data).encode())
def _load_selection(selection):
with subprocess.Popen(['xclip',
'-selection', selection,
'-o'], stdout=subprocess.PIPE) as xclip:
return xclip.stdout.read().decode()
class RPNError(Exception):
pass
def wrap_user_errors(fmt):
'''
Ugly hack decorator that converts exceptions to warnings.
Passes through RPNErrors.
'''
def decorator(f):
@wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except RPNError:
raise
except Exception as e:
raise RPNError(fmt.format(*args, **kwargs), e)
return wrapper
return decorator
|
# ... existing code ...
def _load_selection(selection):
with subprocess.Popen(['xclip',
'-selection', selection,
'-o'], stdout=subprocess.PIPE) as xclip:
return xclip.stdout.read().decode()
# ... rest of the code ...
|
6bc6a07ee60f68e2003b5afcc752c3820a176541
|
astropy/conftest.py
|
astropy/conftest.py
|
from .tests.pytest_plugins import *
try:
import matplotlib
except ImportError:
pass
else:
matplotlib.use('Agg')
enable_deprecations_as_exceptions(include_astropy_deprecations=False)
|
from .tests.pytest_plugins import *
try:
import matplotlib
except ImportError:
pass
else:
matplotlib.use('Agg')
enable_deprecations_as_exceptions(include_astropy_deprecations=False)
PYTEST_HEADER_MODULES['Cython'] = 'cython'
|
Add Cython to py.test header
|
Add Cython to py.test header
|
Python
|
bsd-3-clause
|
kelle/astropy,tbabej/astropy,lpsinger/astropy,joergdietrich/astropy,pllim/astropy,MSeifert04/astropy,AustereCuriosity/astropy,saimn/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,tbabej/astropy,mhvk/astropy,DougBurke/astropy,pllim/astropy,StuartLittlefair/astropy,astropy/astropy,kelle/astropy,AustereCuriosity/astropy,pllim/astropy,funbaker/astropy,mhvk/astropy,larrybradley/astropy,dhomeier/astropy,larrybradley/astropy,astropy/astropy,MSeifert04/astropy,DougBurke/astropy,astropy/astropy,kelle/astropy,saimn/astropy,bsipocz/astropy,kelle/astropy,stargaser/astropy,lpsinger/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,astropy/astropy,AustereCuriosity/astropy,bsipocz/astropy,stargaser/astropy,dhomeier/astropy,stargaser/astropy,DougBurke/astropy,larrybradley/astropy,mhvk/astropy,MSeifert04/astropy,tbabej/astropy,pllim/astropy,StuartLittlefair/astropy,lpsinger/astropy,StuartLittlefair/astropy,stargaser/astropy,funbaker/astropy,lpsinger/astropy,saimn/astropy,pllim/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,astropy/astropy,larrybradley/astropy,mhvk/astropy,lpsinger/astropy,dhomeier/astropy,funbaker/astropy,mhvk/astropy,larrybradley/astropy,MSeifert04/astropy,funbaker/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,saimn/astropy,dhomeier/astropy,AustereCuriosity/astropy,saimn/astropy,joergdietrich/astropy,StuartLittlefair/astropy,dhomeier/astropy,DougBurke/astropy,tbabej/astropy,tbabej/astropy,joergdietrich/astropy,kelle/astropy,bsipocz/astropy,joergdietrich/astropy
|
python
|
## Code Before:
from .tests.pytest_plugins import *
try:
import matplotlib
except ImportError:
pass
else:
matplotlib.use('Agg')
enable_deprecations_as_exceptions(include_astropy_deprecations=False)
## Instruction:
Add Cython to py.test header
## Code After:
from .tests.pytest_plugins import *
try:
import matplotlib
except ImportError:
pass
else:
matplotlib.use('Agg')
enable_deprecations_as_exceptions(include_astropy_deprecations=False)
PYTEST_HEADER_MODULES['Cython'] = 'cython'
|
...
matplotlib.use('Agg')
enable_deprecations_as_exceptions(include_astropy_deprecations=False)
PYTEST_HEADER_MODULES['Cython'] = 'cython'
...
|
62085b0fc21b655f26bc7a0a5495597f5c843120
|
processor/src/test/resources/expected/Arez_TimeModel.java
|
processor/src/test/resources/expected/Arez_TimeModel.java
|
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.Observable;
@Generated( "org.realityforge.arez.processor.ArezProcessor" )
public final class Arez_TimeModel
extends TimeModel
{
private final ArezContext $arez$_context;
private final Observable $arez$_time;
protected Arez_TimeModel( @Nonnull final ArezContext $arez$_context, final long time )
{
super( time );
this.$arez$_context = $arez$_context;
this.$arez$_time = $arez$_context.createObservable( "Time.time" );
}
@Override
public long getTime()
{
this.$arez$_time.reportObserved();
return super.getTime();
}
@Override
public void setTime( final long time )
{
if ( super.getTime() != time )
{
this.$arez$_time.reportChanged();
super.setTime( time );
}
}
@Override
public void updateTime()
{
this.$arez$_context.safeProcedure( "Time.updateTime", true, () -> super.updateTime() );
}
}
|
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.Observable;
@Generated( "org.realityforge.arez.processor.ArezProcessor" )
public final class Arez_TimeModel
extends TimeModel
{
private final ArezContext $arez$_context;
private final Observable $arez$_time;
protected Arez_TimeModel( @Nonnull final ArezContext $arez$_context, final long time )
{
super( time );
this.$arez$_context = $arez$_context;
this.$arez$_time = $arez$_context.createObservable( "Time.time" );
}
@Override
public long getTime()
{
this.$arez$_time.reportObserved();
return super.getTime();
}
@Override
public void setTime( final long time )
{
if ( super.getTime() != time )
{
super.setTime( time );
this.$arez$_time.reportChanged();
}
}
@Override
public void updateTime()
{
this.$arez$_context.safeProcedure( "Time.updateTime", true, () -> super.updateTime() );
}
}
|
Move reportChange to after change occurs
|
Move reportChange to after change occurs
|
Java
|
apache-2.0
|
realityforge/arez,realityforge/arez,realityforge/arez
|
java
|
## Code Before:
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.Observable;
@Generated( "org.realityforge.arez.processor.ArezProcessor" )
public final class Arez_TimeModel
extends TimeModel
{
private final ArezContext $arez$_context;
private final Observable $arez$_time;
protected Arez_TimeModel( @Nonnull final ArezContext $arez$_context, final long time )
{
super( time );
this.$arez$_context = $arez$_context;
this.$arez$_time = $arez$_context.createObservable( "Time.time" );
}
@Override
public long getTime()
{
this.$arez$_time.reportObserved();
return super.getTime();
}
@Override
public void setTime( final long time )
{
if ( super.getTime() != time )
{
this.$arez$_time.reportChanged();
super.setTime( time );
}
}
@Override
public void updateTime()
{
this.$arez$_context.safeProcedure( "Time.updateTime", true, () -> super.updateTime() );
}
}
## Instruction:
Move reportChange to after change occurs
## Code After:
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import org.realityforge.arez.ArezContext;
import org.realityforge.arez.Observable;
@Generated( "org.realityforge.arez.processor.ArezProcessor" )
public final class Arez_TimeModel
extends TimeModel
{
private final ArezContext $arez$_context;
private final Observable $arez$_time;
protected Arez_TimeModel( @Nonnull final ArezContext $arez$_context, final long time )
{
super( time );
this.$arez$_context = $arez$_context;
this.$arez$_time = $arez$_context.createObservable( "Time.time" );
}
@Override
public long getTime()
{
this.$arez$_time.reportObserved();
return super.getTime();
}
@Override
public void setTime( final long time )
{
if ( super.getTime() != time )
{
super.setTime( time );
this.$arez$_time.reportChanged();
}
}
@Override
public void updateTime()
{
this.$arez$_context.safeProcedure( "Time.updateTime", true, () -> super.updateTime() );
}
}
|
# ... existing code ...
{
if ( super.getTime() != time )
{
super.setTime( time );
this.$arez$_time.reportChanged();
}
}
# ... rest of the code ...
|
fab0855e7076d7cfcfe2d65a820ed5099084f543
|
privileges/views.py
|
privileges/views.py
|
import urlparse
from functools import wraps
from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME
from privileges.forms import GrantForm
from privileges.models import Grant
def owner_required(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated():
if request.user.username == kwargs["username"] or \
request.user.is_superuser:
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
current_scheme, current_netloc = urlparse.urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
return _wrapped_view
def cbv_decorator(decorator):
def _decorator(cls):
cls.dispatch = method_decorator(decorator)(cls.dispatch)
return cls
return _decorator
|
import urlparse
from functools import wraps
from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME
from privileges.forms import GrantForm
from privileges.models import Grant
def owner_required(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated():
if request.user.username == kwargs["username"] or \
request.user.is_superuser:
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
current_scheme, current_netloc = urlparse.urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
return _wrapped_view
def cbv_decorator(decorator):
def _decorator(cls):
cls.dispatch = method_decorator(decorator)(cls.dispatch)
return cls
return _decorator
class UsernameContextMixin(object):
def get_context_data(self, **kwargs):
context = super(UsernameContextMixin, self).get_context_data(**kwargs)
context.update({
"username": self.kwargs.get("username")
})
return context
|
Add mixin to put the username in context
|
Add mixin to put the username in context
|
Python
|
bsd-3-clause
|
eldarion/privileges,jacobwegner/privileges,jacobwegner/privileges
|
python
|
## Code Before:
import urlparse
from functools import wraps
from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME
from privileges.forms import GrantForm
from privileges.models import Grant
def owner_required(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated():
if request.user.username == kwargs["username"] or \
request.user.is_superuser:
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
current_scheme, current_netloc = urlparse.urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
return _wrapped_view
def cbv_decorator(decorator):
def _decorator(cls):
cls.dispatch = method_decorator(decorator)(cls.dispatch)
return cls
return _decorator
## Instruction:
Add mixin to put the username in context
## Code After:
import urlparse
from functools import wraps
from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME
from privileges.forms import GrantForm
from privileges.models import Grant
def owner_required(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if request.user.is_authenticated():
if request.user.username == kwargs["username"] or \
request.user.is_superuser:
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
current_scheme, current_netloc = urlparse.urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
return _wrapped_view
def cbv_decorator(decorator):
def _decorator(cls):
cls.dispatch = method_decorator(decorator)(cls.dispatch)
return cls
return _decorator
class UsernameContextMixin(object):
def get_context_data(self, **kwargs):
context = super(UsernameContextMixin, self).get_context_data(**kwargs)
context.update({
"username": self.kwargs.get("username")
})
return context
|
# ... existing code ...
return _decorator
class UsernameContextMixin(object):
def get_context_data(self, **kwargs):
context = super(UsernameContextMixin, self).get_context_data(**kwargs)
context.update({
"username": self.kwargs.get("username")
})
return context
# ... rest of the code ...
|
6845c56edc315f5ce07f0bf1101d59ee04036024
|
pydir/daemon-rxcmd.py
|
pydir/daemon-rxcmd.py
|
import bluetooth
import os
import logging
import time
from daemon import runner
class RxCmdDaemon():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/RxCmdDaemon.pid'
self.pidfile_timeout = 5
def run(self):
while True:
server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
port = 1
server_sock.bind(("",port))
server_sock.listen(1)
client_sock,address = server_sock.accept()
print "Accepted connection from ",address
try:
while True:
data = client_sock.recv(1024)
print "received [%s]" % data
os.system(data)
except Exception as e:
logging.exception(e)
rxCmdDaemon = RxCmdDaemon()
daemon_runner = runner.DaemonRunner(rxCmdDaemon)
daemon_runner.do_action()
|
import bluetooth
import os
import logging
import time
from daemon import runner
class RxCmdDaemon():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/RxCmdDaemon.pid'
self.pidfile_timeout = 5
def run(self):
while True:
server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
port = 1
server_sock.bind(("",port))
server_sock.listen(1)
client_sock,address = server_sock.accept()
print "Accepted connection from ",address
try:
while True:
data = client_sock.recv(1024)
print "received [%s]" % data
os.system(data)
except Exception as e:
logging.exception(e)
while True:
try:
rxCmdDaemon = RxCmdDaemon()
daemon_runner = runner.DaemonRunner(rxCmdDaemon)
daemon_runner.do_action()
except Exception as e:
logging.exception(e)
|
Add try/catch to improve error handling
|
Add try/catch to improve error handling
|
Python
|
apache-2.0
|
javatechs/RxCmd,javatechs/RxCmd,javatechs/RxCmd
|
python
|
## Code Before:
import bluetooth
import os
import logging
import time
from daemon import runner
class RxCmdDaemon():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/RxCmdDaemon.pid'
self.pidfile_timeout = 5
def run(self):
while True:
server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
port = 1
server_sock.bind(("",port))
server_sock.listen(1)
client_sock,address = server_sock.accept()
print "Accepted connection from ",address
try:
while True:
data = client_sock.recv(1024)
print "received [%s]" % data
os.system(data)
except Exception as e:
logging.exception(e)
rxCmdDaemon = RxCmdDaemon()
daemon_runner = runner.DaemonRunner(rxCmdDaemon)
daemon_runner.do_action()
## Instruction:
Add try/catch to improve error handling
## Code After:
import bluetooth
import os
import logging
import time
from daemon import runner
class RxCmdDaemon():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/RxCmdDaemon.pid'
self.pidfile_timeout = 5
def run(self):
while True:
server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
port = 1
server_sock.bind(("",port))
server_sock.listen(1)
client_sock,address = server_sock.accept()
print "Accepted connection from ",address
try:
while True:
data = client_sock.recv(1024)
print "received [%s]" % data
os.system(data)
except Exception as e:
logging.exception(e)
while True:
try:
rxCmdDaemon = RxCmdDaemon()
daemon_runner = runner.DaemonRunner(rxCmdDaemon)
daemon_runner.do_action()
except Exception as e:
logging.exception(e)
|
...
except Exception as e:
logging.exception(e)
while True:
try:
rxCmdDaemon = RxCmdDaemon()
daemon_runner = runner.DaemonRunner(rxCmdDaemon)
daemon_runner.do_action()
except Exception as e:
logging.exception(e)
...
|
3fff79f7dd1d8821a2d6c730f2aff3f994a15558
|
crypto/s2n_hkdf.h
|
crypto/s2n_hkdf.h
|
/*
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <stdint.h>
#include "utils/s2n_blob.h"
#include "crypto/s2n_hmac.h"
extern int s2n_hkdf(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, const struct s2n_blob *info, struct s2n_blob *output);
|
/*
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <stdint.h>
#include "utils/s2n_blob.h"
#include "crypto/s2n_hmac.h"
extern int s2n_hkdf(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, const struct s2n_blob *info, struct s2n_blob *output);
extern int s2n_hkdf_extract(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, struct s2n_blob *pseudo_rand_key);
extern int s2n_hkdf_expand_label(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *secret, const struct s2n_blob *label,
const struct s2n_blob *context, struct s2n_blob *output);
|
Add missing header changes from previous commit
|
Add missing header changes from previous commit
|
C
|
apache-2.0
|
wcs1only/s2n,awslabs/s2n,gibson-compsci/s2n,PKRoma/s2n,awslabs/s2n,PKRoma/s2n,colmmacc/s2n,alexeblee/s2n,raycoll/s2n,raycoll/s2n,wcs1only/s2n,PKRoma/s2n,alexeblee/s2n,gibson-compsci/s2n,wcs1only/s2n,colmmacc/s2n,raycoll/s2n,wcs1only/s2n,colmmacc/s2n,wcs1only/s2n,awslabs/s2n,raycoll/s2n,PKRoma/s2n,PKRoma/s2n,alexeblee/s2n,raycoll/s2n,alexeblee/s2n,gibson-compsci/s2n,alexeblee/s2n,gibson-compsci/s2n,wcs1only/s2n,PKRoma/s2n,gibson-compsci/s2n,PKRoma/s2n,wcs1only/s2n,wcs1only/s2n,colmmacc/s2n,awslabs/s2n,awslabs/s2n,colmmacc/s2n,raycoll/s2n,colmmacc/s2n,awslabs/s2n,alexeblee/s2n,gibson-compsci/s2n,PKRoma/s2n
|
c
|
## Code Before:
/*
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <stdint.h>
#include "utils/s2n_blob.h"
#include "crypto/s2n_hmac.h"
extern int s2n_hkdf(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, const struct s2n_blob *info, struct s2n_blob *output);
## Instruction:
Add missing header changes from previous commit
## Code After:
/*
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <stdint.h>
#include "utils/s2n_blob.h"
#include "crypto/s2n_hmac.h"
extern int s2n_hkdf(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, const struct s2n_blob *info, struct s2n_blob *output);
extern int s2n_hkdf_extract(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, struct s2n_blob *pseudo_rand_key);
extern int s2n_hkdf_expand_label(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *secret, const struct s2n_blob *label,
const struct s2n_blob *context, struct s2n_blob *output);
|
...
extern int s2n_hkdf(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, const struct s2n_blob *info, struct s2n_blob *output);
extern int s2n_hkdf_extract(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *salt,
const struct s2n_blob *key, struct s2n_blob *pseudo_rand_key);
extern int s2n_hkdf_expand_label(struct s2n_hmac_state *hmac, s2n_hmac_algorithm alg, const struct s2n_blob *secret, const struct s2n_blob *label,
const struct s2n_blob *context, struct s2n_blob *output);
...
|
ff9d6bc72673843fcdf6f7e0d866beec5bdb45f0
|
mezzanine/accounts/models.py
|
mezzanine/accounts/models.py
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
Profile.objects.get_or_create(**{str(user_field): instance})
|
from django.db import connection
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
try:
Profile.objects.get_or_create(**{str(user_field): instance})
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
|
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
|
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
|
Python
|
bsd-2-clause
|
jjz/mezzanine,cccs-web/mezzanine,stephenmcd/mezzanine,Kniyl/mezzanine,gradel/mezzanine,mush42/mezzanine,dekomote/mezzanine-modeltranslation-backport,eino-makitalo/mezzanine,dsanders11/mezzanine,cccs-web/mezzanine,christianwgd/mezzanine,scarcry/snm-mezzanine,theclanks/mezzanine,webounty/mezzanine,dovydas/mezzanine,nikolas/mezzanine,douglaskastle/mezzanine,readevalprint/mezzanine,dovydas/mezzanine,emile2016/mezzanine,ZeroXn/mezzanine,vladir/mezzanine,damnfine/mezzanine,vladir/mezzanine,damnfine/mezzanine,stbarnabas/mezzanine,frankchin/mezzanine,stephenmcd/mezzanine,saintbird/mezzanine,Cajoline/mezzanine,Kniyl/mezzanine,wyzex/mezzanine,jjz/mezzanine,readevalprint/mezzanine,viaregio/mezzanine,jerivas/mezzanine,promil23/mezzanine,jerivas/mezzanine,spookylukey/mezzanine,viaregio/mezzanine,nikolas/mezzanine,douglaskastle/mezzanine,Kniyl/mezzanine,sjuxax/mezzanine,webounty/mezzanine,promil23/mezzanine,PegasusWang/mezzanine,biomassives/mezzanine,scarcry/snm-mezzanine,PegasusWang/mezzanine,SoLoHiC/mezzanine,nikolas/mezzanine,agepoly/mezzanine,fusionbox/mezzanine,frankier/mezzanine,PegasusWang/mezzanine,fusionbox/mezzanine,geodesign/mezzanine,sjdines/mezzanine,batpad/mezzanine,AlexHill/mezzanine,scarcry/snm-mezzanine,molokov/mezzanine,viaregio/mezzanine,dekomote/mezzanine-modeltranslation-backport,wbtuomela/mezzanine,promil23/mezzanine,wbtuomela/mezzanine,eino-makitalo/mezzanine,wyzex/mezzanine,saintbird/mezzanine,stephenmcd/mezzanine,joshcartme/mezzanine,gradel/mezzanine,wyzex/mezzanine,adrian-the-git/mezzanine,emile2016/mezzanine,geodesign/mezzanine,frankier/mezzanine,dovydas/mezzanine,industrydive/mezzanine,emile2016/mezzanine,stbarnabas/mezzanine,agepoly/mezzanine,SoLoHiC/mezzanine,readevalprint/mezzanine,SoLoHiC/mezzanine,dustinrb/mezzanine,biomassives/mezzanine,christianwgd/mezzanine,tuxinhang1989/mezzanine,theclanks/mezzanine,sjdines/mezzanine,tuxinhang1989/mezzanine,webounty/mezzanine,Cajoline/mezzanine,dsanders11/mezzanine,jjz/mezzanine,Skytorn86/mezzanine,ryneeverett/mezzanine,joshcartme/mezzanine,Skytorn86/mezzanine,biomassives/mezzanine,agepoly/mezzanine,spookylukey/mezzanine,ZeroXn/mezzanine,spookylukey/mezzanine,industrydive/mezzanine,joshcartme/mezzanine,eino-makitalo/mezzanine,mush42/mezzanine,christianwgd/mezzanine,ryneeverett/mezzanine,jerivas/mezzanine,dustinrb/mezzanine,theclanks/mezzanine,sjdines/mezzanine,vladir/mezzanine,Cicero-Zhao/mezzanine,adrian-the-git/mezzanine,Cicero-Zhao/mezzanine,industrydive/mezzanine,gradel/mezzanine,dekomote/mezzanine-modeltranslation-backport,sjuxax/mezzanine,douglaskastle/mezzanine,adrian-the-git/mezzanine,AlexHill/mezzanine,damnfine/mezzanine,Cajoline/mezzanine,frankchin/mezzanine,molokov/mezzanine,dsanders11/mezzanine,ryneeverett/mezzanine,tuxinhang1989/mezzanine,wbtuomela/mezzanine,ZeroXn/mezzanine,Skytorn86/mezzanine,batpad/mezzanine,mush42/mezzanine,frankchin/mezzanine,saintbird/mezzanine,sjuxax/mezzanine,frankier/mezzanine,molokov/mezzanine,geodesign/mezzanine,dustinrb/mezzanine
|
python
|
## Code Before:
from django.db.models.signals import post_save
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
Profile.objects.get_or_create(**{str(user_field): instance})
## Instruction:
Allow initial user creation in syncdb when a profile model is managed by migrations and doesn't yet exist.
## Code After:
from django.db import connection
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
from mezzanine.accounts import get_profile_model, get_profile_user_fieldname
# Signal for ensuring users have a profile instance.
Profile = get_profile_model()
User = get_user_model()
if Profile:
user_field = get_profile_user_fieldname()
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
try:
Profile.objects.get_or_create(**{str(user_field): instance})
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
|
// ... existing code ...
from django.db import connection
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from mezzanine.utils.models import get_user_model
// ... modified code ...
@receiver(post_save, sender=User)
def user_saved(sender=None, instance=None, **kwargs):
try:
Profile.objects.get_or_create(**{str(user_field): instance})
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
// ... rest of the code ...
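A note on the pattern above: Django signal receivers run synchronously on every save, so the receiver itself has to tolerate the profile table not existing during the very first syncdb, which is exactly what the try/except DatabaseError plus connection.close() achieves. Below is a rough standalone sketch of the signal wiring, assuming only that Django is installed; it uses a plain django.dispatch.Signal instead of post_save so it runs without any settings or database, and the signal and receiver names are invented for illustration.

from django.dispatch import Signal, receiver

# Stand-in for django.db.models.signals.post_save; no models or settings needed.
user_saved_signal = Signal()

@receiver(user_saved_signal)
def ensure_profile(sender=None, instance=None, **kwargs):
    # In the real receiver this is where Profile.objects.get_or_create() runs,
    # wrapped in try/except DatabaseError with connection.close() on failure.
    print("would ensure a profile exists for", instance)

user_saved_signal.send(sender=None, instance="alice")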
|
96924aea75dbbe82fec6c23df405a15e0bfeeac0
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='[email protected]',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='[email protected]',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
Exclude tests folder from dist
|
Exclude tests folder from dist
|
Python
|
bsd-2-clause
|
incuna/django-pgcrypto-fields,atdsaa/django-pgcrypto-fields
|
python
|
## Code Before:
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='[email protected]',
url='https://github.com/incuna/django-pgcrypto-fields',
)
## Instruction:
Exclude tests folder from dist
## Code After:
from setuptools import find_packages, setup
version = '1.0.0'
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
description='Encrypted fields dealing with pgcrypto postgres extension.',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Topic :: Database',
'Topic :: Security :: Cryptography',
],
author='Incuna Ltd',
author_email='[email protected]',
url='https://github.com/incuna/django-pgcrypto-fields',
)
|
# ... existing code ...
setup(
name='django-pgcrypto-fields',
packages=find_packages(exclude=['tests']),
include_package_data=True,
version=version,
license='BSD',
# ... rest of the code ...
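A side note on find_packages(): the exclude argument filters the discovered package names, so a top-level tests package stays out of the built distribution while everything else is kept (sub-packages such as tests.foo would need an extra 'tests.*' pattern). A small self-contained sketch, where the package layout is invented purely for illustration:

import os
import tempfile
from setuptools import find_packages

root = tempfile.mkdtemp()
for pkg in ("pgcrypto_fields", "tests"):  # hypothetical layout
    os.makedirs(os.path.join(root, pkg))
    open(os.path.join(root, pkg, "__init__.py"), "w").close()

print(sorted(find_packages(where=root)))                     # ['pgcrypto_fields', 'tests']
print(sorted(find_packages(where=root, exclude=["tests"])))  # ['pgcrypto_fields']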
|
2733cf558a7455eb017ec4690307a2ee18afbd8b
|
blogtrans.py
|
blogtrans.py
|
from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
|
from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
|
Load blog data from command line
|
Load blog data from command line
|
Python
|
mit
|
miaout17/blogtrans,miaout17/blogtrans
|
python
|
## Code Before:
from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
## Instruction:
Load blog data from command line
## Code After:
from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
|
// ... existing code ...
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
// ... modified code ...
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
// ... rest of the code ...
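For reference, the getopt call above treats "n" as a bare short flag, while the trailing "=" in each long option means it takes a value. A minimal sketch of how such an argument vector parses (the file name is made up):

import getopt

argv = ["-n", "--import-wretch", "blog_backup.xml"]
opts, args = getopt.getopt(argv, "n", ["import-wretch=", "import-blogger="])
print(opts)  # [('-n', ''), ('--import-wretch', 'blog_backup.xml')]
print(args)  # []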
|
71fef8b9696d79f7d6fd024320bc23ce1b7425f3
|
greatbigcrane/preferences/models.py
|
greatbigcrane/preferences/models.py
|
from django.db import models
class Preference(models.Model):
name = models.CharField(max_length=32, unique=True)
value = models.CharField(max_length=512)
|
from django.db import models
class PreferenceManager(models.Manager):
def get_preference(self, name, default=None):
try:
value = Preference.objects.get(name="projects_directory").value
except Preference.DoesNotExist:
return default
class Preference(models.Model):
name = models.CharField(max_length=32, unique=True)
value = models.CharField(max_length=512)
objects = PreferenceManager()
|
Add a manager to make getting preferences prettier.
|
Add a manager to make getting preferences prettier.
|
Python
|
apache-2.0
|
pnomolos/greatbigcrane,pnomolos/greatbigcrane
|
python
|
## Code Before:
from django.db import models
class Preference(models.Model):
name = models.CharField(max_length=32, unique=True)
value = models.CharField(max_length=512)
## Instruction:
Add a manager to make getting preferences prettier.
## Code After:
from django.db import models
class PreferenceManager(models.Manager):
def get_preference(self, name, default=None):
try:
value = Preference.objects.get(name="projects_directory").value
except Preference.DoesNotExist:
return default
class Preference(models.Model):
name = models.CharField(max_length=32, unique=True)
value = models.CharField(max_length=512)
objects = PreferenceManager()
|
...
from django.db import models
class PreferenceManager(models.Manager):
def get_preference(self, name, default=None):
try:
value = Preference.objects.get(name="projects_directory").value
except Preference.DoesNotExist:
return default
class Preference(models.Model):
name = models.CharField(max_length=32, unique=True)
value = models.CharField(max_length=512)
objects = PreferenceManager()
...
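Worth noting: as committed, get_preference() queries the hard-coded name "projects_directory" and never returns the fetched value, so callers only ever see the default. A plain-Python sketch of the presumably intended get-with-fallback behaviour, with no Django required and invented sample data:

_prefs = {"projects_directory": "/srv/projects"}  # hypothetical stored preferences

def get_preference(name, default=None):
    try:
        return _prefs[name]
    except KeyError:
        return default

print(get_preference("projects_directory"))   # /srv/projects
print(get_preference("missing", "fallback"))  # fallback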
|
9f3356d06067dbcc77a79afee6bccf80600dab28
|
server/systeminfo.py
|
server/systeminfo.py
|
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
|
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
Add a method to get the idle time. Also data are directly read from /proc/uptime.
|
Add a method to get the idle time. Also data are directly read from /proc/uptime.
|
Python
|
mit
|
juliendelplanque/raspirestmonitor
|
python
|
## Code Before:
import subprocess
def get_uptime():
""" Return the uptime of the system as a str using the command: $ uptime
"""
proc = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = output.decode("utf-8").split(",")[0]
uptime = uptime[uptime.find("up")+3:len(uptime)] # extract uptime
return uptime
## Instruction:
Add a method to get the idle time. Also data are directly read from /proc/uptime.
## Code After:
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
|
...
import subprocess
from datetime import timedelta
def get_uptime():
""" Return the uptime of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
uptime = int(output.decode("utf-8").split()[0].split(".")[0])
s = uptime % 60
m = int((uptime/60) % 60)
h = int((uptime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
def get_idletime():
""" Return the idle time of the system as a timedelta object.
"""
proc = subprocess.Popen(["cat /proc/uptime"],
stdout=subprocess.PIPE, shell=True)
(output, error) = proc.communicate()
idletime = int(output.decode("utf-8").split()[1].split(".")[0])
s = idletime % 60
m = int((idletime/60) % 60)
h = int((idletime/(60*60) % 24))
return timedelta(hours=h, minutes=m, seconds=s)
...
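A related sketch: /proc/uptime is an ordinary file containing "<uptime seconds> <idle seconds>", so the same two values can be read without spawning a subprocess. This is Linux-only and not the module above, just an illustration of the alternative:

from datetime import timedelta

def read_proc_uptime(path="/proc/uptime"):
    with open(path) as f:
        uptime_s, idle_s = (float(x) for x in f.read().split()[:2])
    return timedelta(seconds=int(uptime_s)), timedelta(seconds=int(idle_s))

if __name__ == "__main__":
    up, idle = read_proc_uptime()
    print("uptime:", up, "idle:", idle)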
|
334e794be7514c032e6db4c39761d67820c405ff
|
oscar/management/commands/oscar_update_product_ratings.py
|
oscar/management/commands/oscar_update_product_ratings.py
|
from django.core.management.base import BaseCommand
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
ProductReview = get_model('reviews', 'ProductReview')
class Command(BaseCommand):
help = """Update the denormalised reviews average on all Product instances.
Should only be necessary when changing to e.g. a weight-based
rating."""
def handle(self, *args, **options):
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
ProductReview.update_product_rating(product)
self.stdout.write('Successfully updated %s products\n'
% products.count())
|
from django.core.management.base import BaseCommand
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
class Command(BaseCommand):
help = """Update the denormalised reviews average on all Product instances.
Should only be necessary when changing to e.g. a weight-based
rating."""
def handle(self, *args, **options):
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
product.update_rating()
self.stdout.write(
'Successfully updated %s products\n' % products.count())
|
Fix bug in management command for updating ratings
|
Fix bug in management command for updating ratings
|
Python
|
bsd-3-clause
|
kapt/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,lijoantony/django-oscar,ahmetdaglarbas/e-commerce,dongguangming/django-oscar,nickpack/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,manevant/django-oscar,jinnykoo/wuyisj,eddiep1101/django-oscar,pdonadeo/django-oscar,taedori81/django-oscar,mexeniz/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj.com,QLGu/django-oscar,amirrpp/django-oscar,anentropic/django-oscar,okfish/django-oscar,bnprk/django-oscar,marcoantoniooliveira/labweb,michaelkuty/django-oscar,Bogh/django-oscar,makielab/django-oscar,adamend/django-oscar,bschuon/django-oscar,bschuon/django-oscar,faratro/django-oscar,manevant/django-oscar,thechampanurag/django-oscar,pdonadeo/django-oscar,manevant/django-oscar,jinnykoo/christmas,Idematica/django-oscar,itbabu/django-oscar,faratro/django-oscar,john-parton/django-oscar,Jannes123/django-oscar,thechampanurag/django-oscar,mexeniz/django-oscar,mexeniz/django-oscar,ademuk/django-oscar,taedori81/django-oscar,ahmetdaglarbas/e-commerce,nickpack/django-oscar,john-parton/django-oscar,kapari/django-oscar,nickpack/django-oscar,pasqualguerrero/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,jinnykoo/christmas,spartonia/django-oscar,adamend/django-oscar,dongguangming/django-oscar,solarissmoke/django-oscar,josesanch/django-oscar,WadeYuChen/django-oscar,ademuk/django-oscar,monikasulik/django-oscar,monikasulik/django-oscar,dongguangming/django-oscar,pasqualguerrero/django-oscar,faratro/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,sonofatailor/django-oscar,WillisXChen/django-oscar,jinnykoo/christmas,marcoantoniooliveira/labweb,amirrpp/django-oscar,ka7eh/django-oscar,QLGu/django-oscar,QLGu/django-oscar,vovanbo/django-oscar,thechampanurag/django-oscar,Idematica/django-oscar,rocopartners/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj.com,nickpack/django-oscar,vovanbo/django-oscar,jlmadurga/django-oscar,elliotthill/django-oscar,anentropic/django-oscar,DrOctogon/unwash_ecom,WillisXChen/django-oscar,jlmadurga/django-oscar,jlmadurga/django-oscar,makielab/django-oscar,DrOctogon/unwash_ecom,Bogh/django-oscar,WadeYuChen/django-oscar,saadatqadri/django-oscar,ka7eh/django-oscar,itbabu/django-oscar,eddiep1101/django-oscar,adamend/django-oscar,jinnykoo/wuyisj.com,rocopartners/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,adamend/django-oscar,machtfit/django-oscar,django-oscar/django-oscar,spartonia/django-oscar,MatthewWilkes/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,faratro/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,amirrpp/django-oscar,Jannes123/django-oscar,manevant/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,saadatqadri/django-oscar,kapt/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,Bogh/django-oscar,MatthewWilkes/django-oscar,jmt4/django-oscar,MatthewWilkes/django-oscar,michaelkuty/django-oscar,ahmetdaglarbas/e-commerce,okfish/django-oscar,ahmetdaglarbas/e-commerce,thechampanurag/django-oscar,elliotthill/django-oscar,elliotthill/django-oscar,monikasulik/django-oscar,jinnykoo/wuyisj.com,django-oscar/django-oscar,michaelkuty/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,MatthewWilkes/django-oscar,WadeYuChen/django-oscar,django-oscar/django-oscar,lijoantony/django-oscar,okfish/django-oscar,monikasulik/django-oscar,lijoantony/django-oscar,ka7eh/django-oscar,solarissmoke/django-oscar,sonofatailor/django-oscar,pasqualguerrero/django-oscar,spartonia/django-oscar,Idematica/django-oscar,binarydud/django
-oscar,binarydud/django-oscar,jmt4/django-oscar,machtfit/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,pdonadeo/django-oscar,Jannes123/django-oscar,rocopartners/django-oscar,okfish/django-oscar,anentropic/django-oscar,nfletton/django-oscar,jlmadurga/django-oscar,jinnykoo/wuyisj,anentropic/django-oscar,nfletton/django-oscar,bnprk/django-oscar,saadatqadri/django-oscar,john-parton/django-oscar,Jannes123/django-oscar,john-parton/django-oscar,michaelkuty/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,makielab/django-oscar,kapari/django-oscar,jinnykoo/wuyisj,nfletton/django-oscar,bnprk/django-oscar,QLGu/django-oscar,josesanch/django-oscar,bschuon/django-oscar,nfletton/django-oscar,dongguangming/django-oscar,saadatqadri/django-oscar,itbabu/django-oscar,lijoantony/django-oscar,kapari/django-oscar,kapt/django-oscar,Bogh/django-oscar,bnprk/django-oscar,sasha0/django-oscar,bschuon/django-oscar,amirrpp/django-oscar,ademuk/django-oscar,josesanch/django-oscar,makielab/django-oscar,sasha0/django-oscar,machtfit/django-oscar,WadeYuChen/django-oscar,binarydud/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,itbabu/django-oscar,pdonadeo/django-oscar
|
python
|
## Code Before:
from django.core.management.base import BaseCommand
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
ProductReview = get_model('reviews', 'ProductReview')
class Command(BaseCommand):
help = """Update the denormalised reviews average on all Product instances.
Should only be necessary when changing to e.g. a weight-based
rating."""
def handle(self, *args, **options):
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
ProductReview.update_product_rating(product)
self.stdout.write('Successfully updated %s products\n'
% products.count())
## Instruction:
Fix bug in management command for updating ratings
## Code After:
from django.core.management.base import BaseCommand
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
class Command(BaseCommand):
help = """Update the denormalised reviews average on all Product instances.
Should only be necessary when changing to e.g. a weight-based
rating."""
def handle(self, *args, **options):
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
product.update_rating()
self.stdout.write(
'Successfully updated %s products\n' % products.count())
|
// ... existing code ...
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
class Command(BaseCommand):
// ... modified code ...
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
product.update_rating()
self.stdout.write(
'Successfully updated %s products\n' % products.count())
// ... rest of the code ...
|
1556e7a89b052f327a6c713cf6b5be3084bde430
|
app/app/src/main/java/me/williamhester/reddit/ui/activities/ContentActivity.java
|
app/app/src/main/java/me/williamhester/reddit/ui/activities/ContentActivity.java
|
package me.williamhester.reddit.ui.activities;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import me.williamhester.reddit.R;
import me.williamhester.reddit.models.Submission;
import me.williamhester.reddit.ui.fragments.CommentsFragment;
/** Activity that holds basic content. */
public class ContentActivity extends BaseActivity {
public static final String TYPE_EXTRA = "type";
public static final String VOTABLE_EXTRA = "votable";
public static final String PERMALINK_EXTRA = "permalink";
public static final String COMMENTS = "comments";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
if (f == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.fragment_container, createContentFragment())
.commit();
}
SharedPreferences prefs = getSharedPreferences("default", MODE_PRIVATE);
prefs.edit().putBoolean("test", true).apply();
}
@Override
protected int getLayoutId() {
return R.layout.activity_content;
}
protected Fragment createContentFragment() {
Bundle args = getIntent().getExtras();
String type = args.getString(TYPE_EXTRA);
if (type == null) {
return null;
}
switch (type) {
case COMMENTS:
String permalink = args.getString(PERMALINK_EXTRA);
Submission s = args.getParcelable(VOTABLE_EXTRA);
return CommentsFragment.newInstance(permalink, s);
default:
return null;
}
}
}
|
package me.williamhester.reddit.ui.activities;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import me.williamhester.reddit.R;
import me.williamhester.reddit.models.Submission;
import me.williamhester.reddit.ui.fragments.CommentsFragment;
/** Activity that holds basic content. */
public class ContentActivity extends BaseActivity {
public static final String TYPE_EXTRA = "type";
public static final String VOTABLE_EXTRA = "votable";
public static final String PERMALINK_EXTRA = "permalink";
public static final String COMMENTS = "comments";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
if (f == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.fragment_container, createContentFragment())
.commit();
}
}
@Override
protected int getLayoutId() {
return R.layout.activity_content;
}
protected Fragment createContentFragment() {
Bundle args = getIntent().getExtras();
String type = args.getString(TYPE_EXTRA);
if (type == null) {
return null;
}
switch (type) {
case COMMENTS:
String permalink = args.getString(PERMALINK_EXTRA);
Submission s = args.getParcelable(VOTABLE_EXTRA);
return CommentsFragment.newInstance(permalink, s);
default:
return null;
}
}
}
|
Remove accidentally committed test code
|
Remove accidentally committed test code
|
Java
|
apache-2.0
|
WilliamHester/Breadit-2
|
java
|
## Code Before:
package me.williamhester.reddit.ui.activities;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import me.williamhester.reddit.R;
import me.williamhester.reddit.models.Submission;
import me.williamhester.reddit.ui.fragments.CommentsFragment;
/** Activity that holds basic content. */
public class ContentActivity extends BaseActivity {
public static final String TYPE_EXTRA = "type";
public static final String VOTABLE_EXTRA = "votable";
public static final String PERMALINK_EXTRA = "permalink";
public static final String COMMENTS = "comments";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
if (f == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.fragment_container, createContentFragment())
.commit();
}
SharedPreferences prefs = getSharedPreferences("default", MODE_PRIVATE);
prefs.edit().putBoolean("test", true).apply();
}
@Override
protected int getLayoutId() {
return R.layout.activity_content;
}
protected Fragment createContentFragment() {
Bundle args = getIntent().getExtras();
String type = args.getString(TYPE_EXTRA);
if (type == null) {
return null;
}
switch (type) {
case COMMENTS:
String permalink = args.getString(PERMALINK_EXTRA);
Submission s = args.getParcelable(VOTABLE_EXTRA);
return CommentsFragment.newInstance(permalink, s);
default:
return null;
}
}
}
## Instruction:
Remove accidentally committed test code
## Code After:
package me.williamhester.reddit.ui.activities;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import me.williamhester.reddit.R;
import me.williamhester.reddit.models.Submission;
import me.williamhester.reddit.ui.fragments.CommentsFragment;
/** Activity that holds basic content. */
public class ContentActivity extends BaseActivity {
public static final String TYPE_EXTRA = "type";
public static final String VOTABLE_EXTRA = "votable";
public static final String PERMALINK_EXTRA = "permalink";
public static final String COMMENTS = "comments";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Fragment f = getSupportFragmentManager().findFragmentById(R.id.fragment_container);
if (f == null) {
getSupportFragmentManager().beginTransaction()
.add(R.id.fragment_container, createContentFragment())
.commit();
}
}
@Override
protected int getLayoutId() {
return R.layout.activity_content;
}
protected Fragment createContentFragment() {
Bundle args = getIntent().getExtras();
String type = args.getString(TYPE_EXTRA);
if (type == null) {
return null;
}
switch (type) {
case COMMENTS:
String permalink = args.getString(PERMALINK_EXTRA);
Submission s = args.getParcelable(VOTABLE_EXTRA);
return CommentsFragment.newInstance(permalink, s);
default:
return null;
}
}
}
|
...
package me.williamhester.reddit.ui.activities;
import android.os.Bundle;
import android.support.v4.app.Fragment;
...
.add(R.id.fragment_container, createContentFragment())
.commit();
}
}
@Override
...
|
54a13d2d9754a44f26dbe5f8b4485267a13a0217
|
drudge/canonpy.h
|
drudge/canonpy.h
|
/* vim: set filetype=cpp: */
/** Header file for canonpy.
*
* Currently it merely contains the definition of the object structure of the
* classes defined in canonpy. They are put here in case a C API is intended
* to be added for canonpy.
*/
#ifndef DRUDGE_CANONPY_H
#define DRUDGE_CANONPY_H
#include <Python.h>
#include <memory>
#include <libcanon/perm.h>
#include <libcanon/sims.h>
using libcanon::Simple_perm;
using libcanon::Sims_transv;
//
// Permutation type
// ----------------
//
/** Object type for canonpy Perm objects.
*/
// clang-format off
typedef struct {
PyObject_HEAD
Simple_perm perm;
} Perm_object;
// clang-format on
//
// Permutation group type
// ----------------------
//
// clang-format off
typedef struct {
PyObject_HEAD
std::unique_ptr<Sims_transv<Simple_perm>> transv;
} Group_object;
// clang-format on
#endif
|
/* vim: set filetype=cpp: */
/** Header file for canonpy.
*
* Currently it merely contains the definition of the object structure of the
* classes defined in canonpy. They are put here in case a C API is intended
* to be added for canonpy.
*/
#ifndef DRUDGE_CANONPY_H
#define DRUDGE_CANONPY_H
#include <Python.h>
#include <memory>
#include <libcanon/perm.h>
#include <libcanon/sims.h>
using libcanon::Simple_perm;
using libcanon::Sims_transv;
//
// Permutation type
// ----------------
//
/** Object type for canonpy Perm objects.
*/
// clang-format off
typedef struct {
PyObject_HEAD
Simple_perm perm;
} Perm_object;
// clang-format on
//
// Permutation group type
// ----------------------
//
using Transv = Sims_transv<Simple_perm>;
using Transv_ptr = std::unique_ptr<Transv>;
// clang-format off
typedef struct {
PyObject_HEAD
Transv_ptr transv;
} Group_object;
// clang-format on
#endif
|
Add type aliases for Group
|
Add type aliases for Group
With these aliases, the code for permutation group manipulation can be
written more succinctly.
|
C
|
mit
|
tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge
|
c
|
## Code Before:
/* vim: set filetype=cpp: */
/** Header file for canonpy.
*
* Currently it merely contains the definition of the object structure of the
* classes defined in canonpy. They are put here in case a C API is intended
* to be added for canonpy.
*/
#ifndef DRUDGE_CANONPY_H
#define DRUDGE_CANONPY_H
#include <Python.h>
#include <memory>
#include <libcanon/perm.h>
#include <libcanon/sims.h>
using libcanon::Simple_perm;
using libcanon::Sims_transv;
//
// Permutation type
// ----------------
//
/** Object type for canonpy Perm objects.
*/
// clang-format off
typedef struct {
PyObject_HEAD
Simple_perm perm;
} Perm_object;
// clang-format on
//
// Permutation group type
// ----------------------
//
// clang-format off
typedef struct {
PyObject_HEAD
std::unique_ptr<Sims_transv<Simple_perm>> transv;
} Group_object;
// clang-format on
#endif
## Instruction:
Add type aliases for Group
With these aliases, the code for permutation group manipulation can be
written more succinctly.
## Code After:
/* vim: set filetype=cpp: */
/** Header file for canonpy.
*
* Currently it merely contains the definition of the object structure of the
* classes defined in canonpy. They are put here in case a C API is intended
* to be added for canonpy.
*/
#ifndef DRUDGE_CANONPY_H
#define DRUDGE_CANONPY_H
#include <Python.h>
#include <memory>
#include <libcanon/perm.h>
#include <libcanon/sims.h>
using libcanon::Simple_perm;
using libcanon::Sims_transv;
//
// Permutation type
// ----------------
//
/** Object type for canonpy Perm objects.
*/
// clang-format off
typedef struct {
PyObject_HEAD
Simple_perm perm;
} Perm_object;
// clang-format on
//
// Permutation group type
// ----------------------
//
using Transv = Sims_transv<Simple_perm>;
using Transv_ptr = std::unique_ptr<Transv>;
// clang-format off
typedef struct {
PyObject_HEAD
Transv_ptr transv;
} Group_object;
// clang-format on
#endif
|
...
// ----------------------
//
using Transv = Sims_transv<Simple_perm>;
using Transv_ptr = std::unique_ptr<Transv>;
// clang-format off
typedef struct {
PyObject_HEAD
Transv_ptr transv;
} Group_object;
// clang-format on
...
|
d73367557183f18f2797964dcad7ce07ce2ae0c6
|
src/main/java/org/realityforge/replicant/client/json/gwt/ReplicantConfig.java
|
src/main/java/org/realityforge/replicant/client/json/gwt/ReplicantConfig.java
|
package org.realityforge.replicant.client.json.gwt;
import org.realityforge.gwt.propertysource.client.PropertySource;
import org.realityforge.gwt.propertysource.client.annotations.Namespace;
import org.realityforge.gwt.propertysource.client.annotations.Property;
@Namespace("replicant")
public interface ReplicantConfig
extends PropertySource
{
@Property( "shouldValidateRepositoryOnLoad" )
boolean shouldValidateRepositoryOnLoad();
}
|
package org.realityforge.replicant.client.json.gwt;
import org.realityforge.gwt.propertysource.client.PropertySource;
import org.realityforge.gwt.propertysource.client.annotations.Namespace;
@Namespace("replicant")
public interface ReplicantConfig
extends PropertySource
{
boolean shouldValidateRepositoryOnLoad();
}
|
Remove unnecessary annotation as it matches the method name
|
Remove unnecessary annotation as it matches the method name
|
Java
|
apache-2.0
|
realityforge/replicant,realityforge/replicant
|
java
|
## Code Before:
package org.realityforge.replicant.client.json.gwt;
import org.realityforge.gwt.propertysource.client.PropertySource;
import org.realityforge.gwt.propertysource.client.annotations.Namespace;
import org.realityforge.gwt.propertysource.client.annotations.Property;
@Namespace("replicant")
public interface ReplicantConfig
extends PropertySource
{
@Property( "shouldValidateRepositoryOnLoad" )
boolean shouldValidateRepositoryOnLoad();
}
## Instruction:
Remove unnecessary annotation as it matches the method name
## Code After:
package org.realityforge.replicant.client.json.gwt;
import org.realityforge.gwt.propertysource.client.PropertySource;
import org.realityforge.gwt.propertysource.client.annotations.Namespace;
@Namespace("replicant")
public interface ReplicantConfig
extends PropertySource
{
boolean shouldValidateRepositoryOnLoad();
}
|
// ... existing code ...
import org.realityforge.gwt.propertysource.client.PropertySource;
import org.realityforge.gwt.propertysource.client.annotations.Namespace;
@Namespace("replicant")
public interface ReplicantConfig
extends PropertySource
{
boolean shouldValidateRepositoryOnLoad();
}
// ... rest of the code ...
|
3da8e205ee031c64f436ffae9d107cb0b537a1b7
|
app/src/main/java/com/blocksolid/income/TaxCalculator.java
|
app/src/main/java/com/blocksolid/income/TaxCalculator.java
|
package com.blocksolid.income;
/**
* Created by Dan Buckland on 16/09/2015.
*/
public class TaxCalculator {
int MAX_PERSONAL_ALLOWANCE = 1060000; // £10,600
int PERSONAL_ALLOWANCE_THRESHOLD = 10000000; // £100,000
int BASIC_RATE_THRESHOLD = 3178500; // £31,785
int HIGHER_RATE_THRESHOLD = 15000000; // £150,000
int annualIncome;
int personalAllowance;
public TaxCalculator(int annualIncome) {
this.annualIncome = annualIncome;
calculatePersonalAllowance();
//calculateDeductions(personalAllowance);
}
public void setAnnualIncome(int newAnnualIncome) {
annualIncome = newAnnualIncome;
}
public int getAnnualIncome() {
return annualIncome;
}
public int calculatePersonalAllowance() {
// TODO calculate the personal allowance that's needed to calculate tax deductions
return personalAllowance;
}
}
|
package com.blocksolid.income;
/**
* Created by Dan Buckland on 16/09/2015.
*/
public class TaxCalculator {
int MAX_PERSONAL_ALLOWANCE = 1060000; // £10,600
int PERSONAL_ALLOWANCE_THRESHOLD = 10000000; // £100,000
int BASIC_RATE_THRESHOLD = 3178500; // £31,785
int HIGHER_RATE_THRESHOLD = 15000000; // £150,000
int annualIncome;
int personalAllowance;
public TaxCalculator(int annualIncome) {
this.annualIncome = annualIncome;
calculatePersonalAllowance();
//calculateDeductions(personalAllowance);
}
public void setAnnualIncome(int newAnnualIncome) {
annualIncome = newAnnualIncome;
}
public int getAnnualIncome() {
return annualIncome;
}
public int calculatePersonalAllowance() {
if (annualIncome > PERSONAL_ALLOWANCE_THRESHOLD) {
int difference = annualIncome - PERSONAL_ALLOWANCE_THRESHOLD;
if (difference % 200 > 0) {
difference = difference - 100;
}
personalAllowance = MAX_PERSONAL_ALLOWANCE - (difference / 2);
if (personalAllowance < 0) {
personalAllowance = 0;
}
} else {
personalAllowance = MAX_PERSONAL_ALLOWANCE;
}
return personalAllowance;
}
}
|
Add calculatePersonalAllowance method to pass unit tests
|
Add calculatePersonalAllowance method to pass unit tests
|
Java
|
mit
|
danbuckland/income-tax-android,danbuckland/income-tax-android
|
java
|
## Code Before:
package com.blocksolid.income;
/**
* Created by Dan Buckland on 16/09/2015.
*/
public class TaxCalculator {
int MAX_PERSONAL_ALLOWANCE = 1060000; // £10,600
int PERSONAL_ALLOWANCE_THRESHOLD = 10000000; // £100,000
int BASIC_RATE_THRESHOLD = 3178500; // £31,785
int HIGHER_RATE_THRESHOLD = 15000000; // £150,000
int annualIncome;
int personalAllowance;
public TaxCalculator(int annualIncome) {
this.annualIncome = annualIncome;
calculatePersonalAllowance();
//calculateDeductions(personalAllowance);
}
public void setAnnualIncome(int newAnnualIncome) {
annualIncome = newAnnualIncome;
}
public int getAnnualIncome() {
return annualIncome;
}
public int calculatePersonalAllowance() {
// TODO calculate the personal allowance that's needed to calculate tax deductions
return personalAllowance;
}
}
## Instruction:
Add calculatePersonalAllowance method to pass unit tests
## Code After:
package com.blocksolid.income;
/**
* Created by Dan Buckland on 16/09/2015.
*/
public class TaxCalculator {
int MAX_PERSONAL_ALLOWANCE = 1060000; // £10,600
int PERSONAL_ALLOWANCE_THRESHOLD = 10000000; // £100,000
int BASIC_RATE_THRESHOLD = 3178500; // £31,785
int HIGHER_RATE_THRESHOLD = 15000000; // £150,000
int annualIncome;
int personalAllowance;
public TaxCalculator(int annualIncome) {
this.annualIncome = annualIncome;
calculatePersonalAllowance();
//calculateDeductions(personalAllowance);
}
public void setAnnualIncome(int newAnnualIncome) {
annualIncome = newAnnualIncome;
}
public int getAnnualIncome() {
return annualIncome;
}
public int calculatePersonalAllowance() {
if (annualIncome > PERSONAL_ALLOWANCE_THRESHOLD) {
int difference = annualIncome - PERSONAL_ALLOWANCE_THRESHOLD;
if (difference % 200 > 0) {
difference = difference - 100;
}
personalAllowance = MAX_PERSONAL_ALLOWANCE - (difference / 2);
if (personalAllowance < 0) {
personalAllowance = 0;
}
} else {
personalAllowance = MAX_PERSONAL_ALLOWANCE;
}
return personalAllowance;
}
}
|
# ... existing code ...
}
public int calculatePersonalAllowance() {
if (annualIncome > PERSONAL_ALLOWANCE_THRESHOLD) {
int difference = annualIncome - PERSONAL_ALLOWANCE_THRESHOLD;
if (difference % 200 > 0) {
difference = difference - 100;
}
personalAllowance = MAX_PERSONAL_ALLOWANCE - (difference / 2);
if (personalAllowance < 0) {
personalAllowance = 0;
}
} else {
personalAllowance = MAX_PERSONAL_ALLOWANCE;
}
return personalAllowance;
}
# ... rest of the code ...
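The taper above works in pence: the allowance shrinks by roughly £1 for every £2 of income over £100,000 and reaches zero at £121,200 under these constants. A small Python mirror of the same arithmetic, useful for sanity-checking expected values in the unit tests (the sample incomes are arbitrary):

MAX_PERSONAL_ALLOWANCE = 1060000         # £10,600
PERSONAL_ALLOWANCE_THRESHOLD = 10000000  # £100,000

def personal_allowance(annual_income):
    if annual_income <= PERSONAL_ALLOWANCE_THRESHOLD:
        return MAX_PERSONAL_ALLOWANCE
    difference = annual_income - PERSONAL_ALLOWANCE_THRESHOLD
    if difference % 200 > 0:
        difference -= 100
    return max(0, MAX_PERSONAL_ALLOWANCE - difference // 2)

assert personal_allowance(5000000) == 1060000   # £50,000 keeps the full allowance
assert personal_allowance(11000000) == 560000   # £110,000 -> £5,600
assert personal_allowance(12120000) == 0        # £121,200 removes it entirely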
|
419c06a550e854c822d7a9612241acb820a5042e
|
projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
|
projects/OG-Financial/src/com/opengamma/financial/aggregation/CurrencyAggregationFunction.java
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.aggregation;
import com.opengamma.core.position.Position;
import com.opengamma.core.security.Security;
import com.opengamma.financial.security.FinancialSecurity;
import com.opengamma.financial.security.FinancialSecurityVisitor;
import com.opengamma.financial.security.bond.BondSecurity;
import com.opengamma.financial.security.cash.CashSecurity;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.financial.security.fra.FRASecurity;
import com.opengamma.financial.security.future.FutureSecurity;
import com.opengamma.financial.security.option.OptionSecurity;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.util.money.Currency;
/**
* Function to classify positions by Currency.
*
*/
public class CurrencyAggregationFunction implements AggregationFunction<Currency> {
private static final String NAME = "Currency";
@Override
public Currency classifyPosition(Position position) {
Security security = position.getSecurity();
if (security instanceof FinancialSecurity) {
FinancialSecurity finSec = (FinancialSecurity) security;
return finSec.accept(new FinancialSecurityVisitor<Currency>() {
@Override
public Currency visitBondSecurity(BondSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCashSecurity(CashSecurity security) {
return null;
}
@Override
public Currency visitEquitySecurity(EquitySecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFRASecurity(FRASecurity security) {
return null;
}
@Override
public Currency visitFutureSecurity(FutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitOptionSecurity(OptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitSwapSecurity(SwapSecurity security) {
return null;
}
});
} else {
return null;
}
}
public String getName() {
return NAME;
}
}
|
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.aggregation;
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.util.money.Currency;
/**
* Function to classify positions by Currency.
*
*/
public class CurrencyAggregationFunction implements AggregationFunction<Currency> {
private static final String NAME = "Currency";
@Override
public Currency classifyPosition(Position position) {
return FinancialSecurityUtils.getCurrencyUnit(position.getSecurity());
}
public String getName() {
return NAME;
}
}
|
Use FinancialSecurityUtils instead of code duplication to do the currency lookup.
|
Use FinancialSecurityUtils instead of code duplication to do the currency lookup.
|
Java
|
apache-2.0
|
nssales/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,McLeodMoores/starling,DevStreet/FinanceAnalytics,jeorme/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform
|
java
|
## Code Before:
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.aggregation;
import com.opengamma.core.position.Position;
import com.opengamma.core.security.Security;
import com.opengamma.financial.security.FinancialSecurity;
import com.opengamma.financial.security.FinancialSecurityVisitor;
import com.opengamma.financial.security.bond.BondSecurity;
import com.opengamma.financial.security.cash.CashSecurity;
import com.opengamma.financial.security.equity.EquitySecurity;
import com.opengamma.financial.security.fra.FRASecurity;
import com.opengamma.financial.security.future.FutureSecurity;
import com.opengamma.financial.security.option.OptionSecurity;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.util.money.Currency;
/**
* Function to classify positions by Currency.
*
*/
public class CurrencyAggregationFunction implements AggregationFunction<Currency> {
private static final String NAME = "Currency";
@Override
public Currency classifyPosition(Position position) {
Security security = position.getSecurity();
if (security instanceof FinancialSecurity) {
FinancialSecurity finSec = (FinancialSecurity) security;
return finSec.accept(new FinancialSecurityVisitor<Currency>() {
@Override
public Currency visitBondSecurity(BondSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitCashSecurity(CashSecurity security) {
return null;
}
@Override
public Currency visitEquitySecurity(EquitySecurity security) {
return security.getCurrency();
}
@Override
public Currency visitFRASecurity(FRASecurity security) {
return null;
}
@Override
public Currency visitFutureSecurity(FutureSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitOptionSecurity(OptionSecurity security) {
return security.getCurrency();
}
@Override
public Currency visitSwapSecurity(SwapSecurity security) {
return null;
}
});
} else {
return null;
}
}
public String getName() {
return NAME;
}
}
## Instruction:
Use FinancialSecurityUtils instead of code duplication to do the currency lookup.
## Code After:
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.aggregation;
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.util.money.Currency;
/**
* Function to classify positions by Currency.
*
*/
public class CurrencyAggregationFunction implements AggregationFunction<Currency> {
private static final String NAME = "Currency";
@Override
public Currency classifyPosition(Position position) {
return FinancialSecurityUtils.getCurrencyUnit(position.getSecurity());
}
public String getName() {
return NAME;
}
}
|
# ... existing code ...
package com.opengamma.financial.aggregation;
import com.opengamma.core.position.Position;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.util.money.Currency;
/**
# ... modified code ...
@Override
public Currency classifyPosition(Position position) {
return FinancialSecurityUtils.getCurrencyUnit(position.getSecurity());
}
public String getName() {
# ... rest of the code ...
|
de958b9fc68ad6209749edbfe2bdde0ef68cf3c8
|
experiments/middleware.py
|
experiments/middleware.py
|
from experiments.utils import participant
class ExperimentsRetentionMiddleware(object):
def process_response(self, request, response):
# We detect widgets by relying on the fact that they are flagged as being embedable, and don't include these in visit tracking
if getattr(response, 'xframe_options_exempt', False):
return response
experiment_user = participant(request)
experiment_user.visit()
return response
|
from experiments.utils import participant
class ExperimentsRetentionMiddleware(object):
def process_response(self, request, response):
# Don't track, failed pages, ajax requests, logged out users or widget impressions.
# We detect widgets by relying on the fact that they are flagged as being embedable
if response.status_code != 200 or request.is_ajax() or getattr(response, 'xframe_options_exempt', False):
return response
experiment_user = participant(request)
experiment_user.visit()
return response
|
Revert "tidy up ajax page loads so they count towards experiments"
|
Revert "tidy up ajax page loads so they count towards experiments"
This reverts commit a37cacb96c4021fcc2f9e23e024d8947bb4e644f.
|
Python
|
mit
|
mixcloud/django-experiments,bjarnoldus/django-experiments,bjarnoldus/django-experiments,robertobarreda/django-experiments,mixcloud/django-experiments,robertobarreda/django-experiments,squamous/django-experiments,squamous/django-experiments,uhuramedia/django-experiments,mixcloud/django-experiments,bjarnoldus/django-experiments,uhuramedia/django-experiments,squamous/django-experiments,uhuramedia/django-experiments,robertobarreda/django-experiments
|
python
|
## Code Before:
from experiments.utils import participant
class ExperimentsRetentionMiddleware(object):
def process_response(self, request, response):
# We detect widgets by relying on the fact that they are flagged as being embedable, and don't include these in visit tracking
if getattr(response, 'xframe_options_exempt', False):
return response
experiment_user = participant(request)
experiment_user.visit()
return response
## Instruction:
Revert "tidy up ajax page loads so they count towards experiments"
This reverts commit a37cacb96c4021fcc2f9e23e024d8947bb4e644f.
## Code After:
from experiments.utils import participant
class ExperimentsRetentionMiddleware(object):
def process_response(self, request, response):
# Don't track, failed pages, ajax requests, logged out users or widget impressions.
# We detect widgets by relying on the fact that they are flagged as being embedable
if response.status_code != 200 or request.is_ajax() or getattr(response, 'xframe_options_exempt', False):
return response
experiment_user = participant(request)
experiment_user.visit()
return response
|
# ... existing code ...
class ExperimentsRetentionMiddleware(object):
def process_response(self, request, response):
# Don't track, failed pages, ajax requests, logged out users or widget impressions.
# We detect widgets by relying on the fact that they are flagged as being embedable
if response.status_code != 200 or request.is_ajax() or getattr(response, 'xframe_options_exempt', False):
return response
experiment_user = participant(request)
# ... rest of the code ...
|
03a803bb87478d79f67b20275bc45b56e7c8300f
|
tests/similarity/test_new_similarity.py
|
tests/similarity/test_new_similarity.py
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
Fix incorrect import reference to nw_similarity
|
Fix incorrect import reference to nw_similarity
|
Python
|
mit
|
dpazel/tryinggithub
|
python
|
## Code Before:
import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()
## Instruction:
Fix incorrect import reference to nw_similarity
## Code After:
import unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
...
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
...
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
...
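The NWAlgorithm class exercised above is not shown in this change; for orientation, here is a generic Needleman-Wunsch scoring sketch (match = 1, mismatch and gap = -1), which is not the project's implementation or its scoring scheme:

def nw_score(a, b, match=1, mismatch=-1, gap=-1):
    rows, cols = len(a) + 1, len(b) + 1
    m = [[0] * cols for _ in range(rows)]
    for i in range(1, rows):
        m[i][0] = i * gap
    for j in range(1, cols):
        m[0][j] = j * gap
    for i in range(1, rows):
        for j in range(1, cols):
            diag = m[i - 1][j - 1] + (match if a[i - 1] == b[j - 1] else mismatch)
            m[i][j] = max(diag, m[i - 1][j] + gap, m[i][j - 1] + gap)
    return m[-1][-1]

print(nw_score('abcdefghij', 'dgj'))  # same inputs as the test above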
|
074c83285bba8a8805bf35dec9893771220b1715
|
foodsaving/users/stats.py
|
foodsaving/users/stats.py
|
from django.contrib.auth import get_user_model
from django.db.models import Count
from foodsaving.groups.models import GroupMembership
from foodsaving.webhooks.models import EmailEvent
def get_users_stats():
User = get_user_model()
active_users = User.objects.filter(groupmembership__in=GroupMembership.objects.active(), deleted=False).distinct()
active_membership_count = GroupMembership.objects.active().count()
active_users_count = active_users.count()
fields = {
'active_count':
active_users_count,
'active_unverified_count':
active_users.filter(mail_verified=False).count(),
'active_ignored_email_count':
active_users.filter(email__in=EmailEvent.objects.ignored_addresses()).count(),
'active_with_location_count':
active_users.exclude(latitude=None).exclude(longitude=None).count(),
'active_with_mobile_number_count':
active_users.exclude(mobile_number='').count(),
'active_with_description_count':
active_users.exclude(description='').count(),
'active_with_photo_count':
active_users.exclude(photo='').count(),
'active_memberships_per_active_user_avg':
active_membership_count / active_users_count,
'no_membership_count':
User.objects.annotate(groups_count=Count('groupmembership')).filter(groups_count=0, deleted=False).count(),
'deleted_count':
User.objects.filter(deleted=True).count(),
}
return fields
|
from django.contrib.auth import get_user_model
from foodsaving.groups.models import GroupMembership
from foodsaving.webhooks.models import EmailEvent
def get_users_stats():
User = get_user_model()
active_users = User.objects.filter(groupmembership__in=GroupMembership.objects.active(), deleted=False).distinct()
active_membership_count = GroupMembership.objects.active().count()
active_users_count = active_users.count()
fields = {
'active_count': active_users_count,
'active_unverified_count': active_users.filter(mail_verified=False).count(),
'active_ignored_email_count': active_users.filter(email__in=EmailEvent.objects.ignored_addresses()).count(),
'active_with_location_count': active_users.exclude(latitude=None).exclude(longitude=None).count(),
'active_with_mobile_number_count': active_users.exclude(mobile_number='').count(),
'active_with_description_count': active_users.exclude(description='').count(),
'active_with_photo_count': active_users.exclude(photo='').count(),
'active_memberships_per_active_user_avg': active_membership_count / active_users_count,
'no_membership_count': User.objects.filter(groupmembership=None, deleted=False).count(),
'deleted_count': User.objects.filter(deleted=True).count(),
}
return fields
|
Use slightly better approach to count users without groups
|
Use slightly better approach to count users without groups
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/yunity-core
|
python
|
## Code Before:
from django.contrib.auth import get_user_model
from django.db.models import Count
from foodsaving.groups.models import GroupMembership
from foodsaving.webhooks.models import EmailEvent
def get_users_stats():
User = get_user_model()
active_users = User.objects.filter(groupmembership__in=GroupMembership.objects.active(), deleted=False).distinct()
active_membership_count = GroupMembership.objects.active().count()
active_users_count = active_users.count()
fields = {
'active_count':
active_users_count,
'active_unverified_count':
active_users.filter(mail_verified=False).count(),
'active_ignored_email_count':
active_users.filter(email__in=EmailEvent.objects.ignored_addresses()).count(),
'active_with_location_count':
active_users.exclude(latitude=None).exclude(longitude=None).count(),
'active_with_mobile_number_count':
active_users.exclude(mobile_number='').count(),
'active_with_description_count':
active_users.exclude(description='').count(),
'active_with_photo_count':
active_users.exclude(photo='').count(),
'active_memberships_per_active_user_avg':
active_membership_count / active_users_count,
'no_membership_count':
User.objects.annotate(groups_count=Count('groupmembership')).filter(groups_count=0, deleted=False).count(),
'deleted_count':
User.objects.filter(deleted=True).count(),
}
return fields
## Instruction:
Use slightly better approach to count users without groups
## Code After:
from django.contrib.auth import get_user_model
from foodsaving.groups.models import GroupMembership
from foodsaving.webhooks.models import EmailEvent
def get_users_stats():
User = get_user_model()
active_users = User.objects.filter(groupmembership__in=GroupMembership.objects.active(), deleted=False).distinct()
active_membership_count = GroupMembership.objects.active().count()
active_users_count = active_users.count()
fields = {
'active_count': active_users_count,
'active_unverified_count': active_users.filter(mail_verified=False).count(),
'active_ignored_email_count': active_users.filter(email__in=EmailEvent.objects.ignored_addresses()).count(),
'active_with_location_count': active_users.exclude(latitude=None).exclude(longitude=None).count(),
'active_with_mobile_number_count': active_users.exclude(mobile_number='').count(),
'active_with_description_count': active_users.exclude(description='').count(),
'active_with_photo_count': active_users.exclude(photo='').count(),
'active_memberships_per_active_user_avg': active_membership_count / active_users_count,
'no_membership_count': User.objects.filter(groupmembership=None, deleted=False).count(),
'deleted_count': User.objects.filter(deleted=True).count(),
}
return fields
|
# ... existing code ...
from django.contrib.auth import get_user_model
from foodsaving.groups.models import GroupMembership
from foodsaving.webhooks.models import EmailEvent
# ... modified code ...
active_users_count = active_users.count()
fields = {
'active_count': active_users_count,
'active_unverified_count': active_users.filter(mail_verified=False).count(),
'active_ignored_email_count': active_users.filter(email__in=EmailEvent.objects.ignored_addresses()).count(),
'active_with_location_count': active_users.exclude(latitude=None).exclude(longitude=None).count(),
'active_with_mobile_number_count': active_users.exclude(mobile_number='').count(),
'active_with_description_count': active_users.exclude(description='').count(),
'active_with_photo_count': active_users.exclude(photo='').count(),
'active_memberships_per_active_user_avg': active_membership_count / active_users_count,
'no_membership_count': User.objects.filter(groupmembership=None, deleted=False).count(),
'deleted_count': User.objects.filter(deleted=True).count(),
}
return fields
# ... rest of the code ...
|
4ed8f05fa43f29a1881a23ae99fdc3ad8cd661b0
|
grammpy/StringGrammar.py
|
grammpy/StringGrammar.py
|
from .RawGrammar import RawGrammar as Grammar
class StringGrammar(Grammar):
@staticmethod
def __to_string_arr(t):
if isinstance(t, str):
return [t]
return t
def remove_term(self, term=None):
return super().remove_term(StringGrammar.__to_string_arr(term))
def add_term(self, term):
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
return super().term(StringGrammar.__to_string_arr(term))
def get_term(self, term=None):
return super().get_term(StringGrammar.__to_string_arr(term))
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
|
from .RawGrammar import RawGrammar as Grammar
class StringGrammar(Grammar):
@staticmethod
def __to_string_arr(t):
if isinstance(t, str):
return [t]
return t
def remove_term(self, term=None):
return super().remove_term(StringGrammar.__to_string_arr(term))
def add_term(self, term):
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
return super().term(StringGrammar.__to_string_arr(term))
def get_term(self, term=None):
res = super().get_term(StringGrammar.__to_string_arr(term))
if isinstance(term, str):
return res[0]
return res
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
|
Correct return of Terminal instance when parameter is string
|
Correct return of Terminal instance when parameter is string
|
Python
|
mit
|
PatrikValkovic/grammpy
|
python
|
## Code Before:
from .RawGrammar import RawGrammar as Grammar
class StringGrammar(Grammar):
@staticmethod
def __to_string_arr(t):
if isinstance(t, str):
return [t]
return t
def remove_term(self, term=None):
return super().remove_term(StringGrammar.__to_string_arr(term))
def add_term(self, term):
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
return super().term(StringGrammar.__to_string_arr(term))
def get_term(self, term=None):
return super().get_term(StringGrammar.__to_string_arr(term))
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
## Instruction:
Correct return of Terminal instance when parameter is string
## Code After:
from .RawGrammar import RawGrammar as Grammar
class StringGrammar(Grammar):
@staticmethod
def __to_string_arr(t):
if isinstance(t, str):
return [t]
return t
def remove_term(self, term=None):
return super().remove_term(StringGrammar.__to_string_arr(term))
def add_term(self, term):
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
return super().term(StringGrammar.__to_string_arr(term))
def get_term(self, term=None):
res = super().get_term(StringGrammar.__to_string_arr(term))
if isinstance(term, str):
return res[0]
return res
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
|
# ... existing code ...
return super().term(StringGrammar.__to_string_arr(term))
def get_term(self, term=None):
res = super().get_term(StringGrammar.__to_string_arr(term))
if isinstance(term, str):
return res[0]
return res
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
# ... rest of the code ...
|
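A standalone sketch of the convenience pattern behind this change: normalise a lone string to a list on the way in, and unwrap the single result on the way out so callers get back the same shape they passed in (the class and method names below are illustrative, not grammpy's API):
def _to_list(term):
    # Accept either a single string or an iterable of strings.
    return [term] if isinstance(term, str) else list(term)
class TermStore(object):
    def __init__(self):
        self._terms = set()
    def add(self, term):
        self._terms.update(_to_list(term))
    def get(self, term):
        found = [t for t in _to_list(term) if t in self._terms]
        # Mirror the fix above: a scalar argument yields a scalar result.
        if isinstance(term, str):
            return found[0] if found else None
        return found
store = TermStore()
store.add('a')
store.add(['b', 'c'])
assert store.get('a') == 'a'
assert store.get(['a', 'b']) == ['a', 'b']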
a2d8178f844c9048b17aac98d6145bb3d613f809
|
Common/src/main/java/net/darkhax/bookshelf/api/serialization/SerializerIngredient.java
|
Common/src/main/java/net/darkhax/bookshelf/api/serialization/SerializerIngredient.java
|
package net.darkhax.bookshelf.api.serialization;
import com.google.gson.JsonElement;
import net.minecraft.nbt.Tag;
import net.minecraft.network.FriendlyByteBuf;
import net.minecraft.world.item.crafting.Ingredient;
public class SerializerIngredient implements ISerializer<Ingredient> {
public static final ISerializer<Ingredient> SERIALIZER = new SerializerIngredient();
private SerializerIngredient() {
}
@Override
public Ingredient fromJSON(JsonElement json) {
return Ingredient.fromJson(json);
}
@Override
public JsonElement toJSON(Ingredient toWrite) {
return toWrite.toJson();
}
@Override
public Ingredient fromByteBuf(FriendlyByteBuf buffer) {
return Ingredient.fromNetwork(buffer);
}
@Override
public void toByteBuf(FriendlyByteBuf buffer, Ingredient toWrite) {
toWrite.toNetwork(buffer);
}
@Override
public Tag toNBT(Ingredient toWrite) {
return Serializers.STRING.toNBT(this.toJSONString(toWrite));
}
@Override
public Ingredient fromNBT(Tag nbt) {
return this.fromJSONString(Serializers.STRING.fromNBT(nbt));
}
}
|
package net.darkhax.bookshelf.api.serialization;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import net.minecraft.nbt.Tag;
import net.minecraft.network.FriendlyByteBuf;
import net.minecraft.world.item.crafting.Ingredient;
public class SerializerIngredient implements ISerializer<Ingredient> {
public static final ISerializer<Ingredient> SERIALIZER = new SerializerIngredient();
private SerializerIngredient() {
}
@Override
public Ingredient fromJSON(JsonElement json) {
// Support Empty Ingredient
if (json instanceof JsonArray array && array.isEmpty()) {
return Ingredient.EMPTY;
}
return Ingredient.fromJson(json);
}
@Override
public JsonElement toJSON(Ingredient toWrite) {
return toWrite.toJson();
}
@Override
public Ingredient fromByteBuf(FriendlyByteBuf buffer) {
return Ingredient.fromNetwork(buffer);
}
@Override
public void toByteBuf(FriendlyByteBuf buffer, Ingredient toWrite) {
toWrite.toNetwork(buffer);
}
@Override
public Tag toNBT(Ingredient toWrite) {
return Serializers.STRING.toNBT(this.toJSONString(toWrite));
}
@Override
public Ingredient fromNBT(Tag nbt) {
return this.fromJSONString(Serializers.STRING.fromNBT(nbt));
}
}
|
Fix crash when serializing empty JSON ingredients.
|
Fix crash when serializing empty JSON ingredients.
|
Java
|
lgpl-2.1
|
Darkhax-Minecraft/Bookshelf
|
java
|
## Code Before:
package net.darkhax.bookshelf.api.serialization;
import com.google.gson.JsonElement;
import net.minecraft.nbt.Tag;
import net.minecraft.network.FriendlyByteBuf;
import net.minecraft.world.item.crafting.Ingredient;
public class SerializerIngredient implements ISerializer<Ingredient> {
public static final ISerializer<Ingredient> SERIALIZER = new SerializerIngredient();
private SerializerIngredient() {
}
@Override
public Ingredient fromJSON(JsonElement json) {
return Ingredient.fromJson(json);
}
@Override
public JsonElement toJSON(Ingredient toWrite) {
return toWrite.toJson();
}
@Override
public Ingredient fromByteBuf(FriendlyByteBuf buffer) {
return Ingredient.fromNetwork(buffer);
}
@Override
public void toByteBuf(FriendlyByteBuf buffer, Ingredient toWrite) {
toWrite.toNetwork(buffer);
}
@Override
public Tag toNBT(Ingredient toWrite) {
return Serializers.STRING.toNBT(this.toJSONString(toWrite));
}
@Override
public Ingredient fromNBT(Tag nbt) {
return this.fromJSONString(Serializers.STRING.fromNBT(nbt));
}
}
## Instruction:
Fix crash when serializing empty JSON ingredients.
## Code After:
package net.darkhax.bookshelf.api.serialization;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import net.minecraft.nbt.Tag;
import net.minecraft.network.FriendlyByteBuf;
import net.minecraft.world.item.crafting.Ingredient;
public class SerializerIngredient implements ISerializer<Ingredient> {
public static final ISerializer<Ingredient> SERIALIZER = new SerializerIngredient();
private SerializerIngredient() {
}
@Override
public Ingredient fromJSON(JsonElement json) {
// Support Empty Ingredient
if (json instanceof JsonArray array && array.isEmpty()) {
return Ingredient.EMPTY;
}
return Ingredient.fromJson(json);
}
@Override
public JsonElement toJSON(Ingredient toWrite) {
return toWrite.toJson();
}
@Override
public Ingredient fromByteBuf(FriendlyByteBuf buffer) {
return Ingredient.fromNetwork(buffer);
}
@Override
public void toByteBuf(FriendlyByteBuf buffer, Ingredient toWrite) {
toWrite.toNetwork(buffer);
}
@Override
public Tag toNBT(Ingredient toWrite) {
return Serializers.STRING.toNBT(this.toJSONString(toWrite));
}
@Override
public Ingredient fromNBT(Tag nbt) {
return this.fromJSONString(Serializers.STRING.fromNBT(nbt));
}
}
|
...
package net.darkhax.bookshelf.api.serialization;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import net.minecraft.nbt.Tag;
import net.minecraft.network.FriendlyByteBuf;
...
@Override
public Ingredient fromJSON(JsonElement json) {
// Support Empty Ingredient
if (json instanceof JsonArray array && array.isEmpty()) {
return Ingredient.EMPTY;
}
return Ingredient.fromJson(json);
}
...
|
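The same guard, shown as a small self-contained Python sketch: an empty JSON array is mapped to an explicit EMPTY value before it reaches a parser that would reject it (EMPTY and parse_item stand in for Ingredient.EMPTY and Ingredient.fromJson, which are only known here from the code above):
import json
EMPTY = object()  # stand-in for an "empty ingredient" sentinel
def parse_item(node):
    # A strict parser that, like the underlying fromJson call, refuses empty input.
    if isinstance(node, list) and not node:
        raise ValueError('item cannot be empty')
    return node
def from_json(text):
    node = json.loads(text)
    # Support the empty case instead of crashing on "[]".
    if isinstance(node, list) and not node:
        return EMPTY
    return parse_item(node)
assert from_json('[]') is EMPTY
assert from_json('[{"item": "minecraft:stone"}]') == [{'item': 'minecraft:stone'}]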
4c5550420b8a9f1bf88f4329952f6e2a161cd20f
|
test/test_panels/test_navigation.py
|
test/test_panels/test_navigation.py
|
from pyqode.core.api import TextHelper
from pyqode.qt.QtTest import QTest
def test_toggle_button(editor):
editor.file.open('test/files/example.json')
editor.show()
TextHelper(editor).goto_line(6)
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
|
from pyqode.core.api import TextHelper
from pyqode.qt.QtTest import QTest
def test_toggle_button(editor):
editor.file.open('test/files/example.json')
editor.show()
TextHelper(editor).goto_line(6)
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text().replace('&', '').lower() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
|
Fix test on kaos with latest qt5
|
Fix test on kaos with latest qt5
|
Python
|
mit
|
pyQode/pyqode.json,pyQode/pyqode.json
|
python
|
## Code Before:
from pyqode.core.api import TextHelper
from pyqode.qt.QtTest import QTest
def test_toggle_button(editor):
editor.file.open('test/files/example.json')
editor.show()
TextHelper(editor).goto_line(6)
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
## Instruction:
Fix test on kaos with latest qt5
## Code After:
from pyqode.core.api import TextHelper
from pyqode.qt.QtTest import QTest
def test_toggle_button(editor):
editor.file.open('test/files/example.json')
editor.show()
TextHelper(editor).goto_line(6)
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text().replace('&', '').lower() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
|
// ... existing code ...
QTest.qWait(500)
panel = editor.panels.get('NavigationPanel')
assert len(panel._widgets) == 4
assert panel._widgets[1].text().replace('&', '').lower() == 'window'
panel._widgets[1].toggled.emit(True)
QTest.qWait(500)
assert TextHelper(editor).cursor_position()[0] == 3
// ... rest of the code ...
|
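The assertion changed because some platform styles (the subject mentions KaOS with a newer Qt5) inject '&' mnemonic markers into button labels and may alter their case, so the test normalises the text before comparing. The normalisation on its own, as a tiny runnable check:
def normalize_button_text(text):
    # Strip Qt auto-mnemonic ampersands and ignore case differences.
    return text.replace('&', '').lower()
assert normalize_button_text('&Window') == 'window'
assert normalize_button_text('window') == 'window'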
20f17c6dc5a1270ad8fe3c932fbb9cf384558b97
|
src/main/java/pl/niekoniecznie/polar/filesystem/PolarFile.java
|
src/main/java/pl/niekoniecznie/polar/filesystem/PolarFile.java
|
package pl.niekoniecznie.polar.filesystem;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ak on 07.04.15.
*/
public class PolarFile {
private final PolarFileSystem fs = new PolarFileSystem();
private final String path;
public PolarFile(String path) {
this.path = path;
}
public String getPath() {
return this.path;
}
public boolean isDirectory() {
return fs.isDirectory(this);
}
public List<String> list() throws IOException {
return fs.list(this);
}
public List<PolarFile> listFiles() throws IOException {
if (!isDirectory()) {
return null;
}
List<PolarFile> result = new ArrayList<>();
for (String child : list()) {
result.add(new PolarFile(child));
}
return result;
}
}
|
package pl.niekoniecznie.polar.filesystem;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ak on 07.04.15.
*/
public class PolarFile {
private final PolarFileSystem fs = new PolarFileSystem();
private final String path;
public PolarFile(String path) {
this.path = path;
}
public String getPath() {
return this.path;
}
public boolean isDirectory() {
return fs.isDirectory(this);
}
public List<PolarFile> listFiles() throws IOException {
if (!isDirectory()) {
return null;
}
List<PolarFile> result = new ArrayList<>();
fs.list(this).forEach((x) -> result.add(new PolarFile(x)));
return result;
}
}
|
Remove redundant method. Use your first lambda expression.
|
Remove redundant method. Use your first lambda expression.
|
Java
|
mit
|
dredzik/polar,dredzik/polarusbdump
|
java
|
## Code Before:
package pl.niekoniecznie.polar.filesystem;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ak on 07.04.15.
*/
public class PolarFile {
private final PolarFileSystem fs = new PolarFileSystem();
private final String path;
public PolarFile(String path) {
this.path = path;
}
public String getPath() {
return this.path;
}
public boolean isDirectory() {
return fs.isDirectory(this);
}
public List<String> list() throws IOException {
return fs.list(this);
}
public List<PolarFile> listFiles() throws IOException {
if (!isDirectory()) {
return null;
}
List<PolarFile> result = new ArrayList<>();
for (String child : list()) {
result.add(new PolarFile(child));
}
return result;
}
}
## Instruction:
Remove redundant method. Use your first lambda expression.
## Code After:
package pl.niekoniecznie.polar.filesystem;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by ak on 07.04.15.
*/
public class PolarFile {
private final PolarFileSystem fs = new PolarFileSystem();
private final String path;
public PolarFile(String path) {
this.path = path;
}
public String getPath() {
return this.path;
}
public boolean isDirectory() {
return fs.isDirectory(this);
}
public List<PolarFile> listFiles() throws IOException {
if (!isDirectory()) {
return null;
}
List<PolarFile> result = new ArrayList<>();
fs.list(this).forEach((x) -> result.add(new PolarFile(x)));
return result;
}
}
|
# ... existing code ...
return fs.isDirectory(this);
}
public List<PolarFile> listFiles() throws IOException {
if (!isDirectory()) {
return null;
# ... modified code ...
List<PolarFile> result = new ArrayList<>();
fs.list(this).forEach((x) -> result.add(new PolarFile(x)));
return result;
}
# ... rest of the code ...
|
159e1e210480c0037b3a550e70b77dbfce34bbca
|
ptyme/ptyme.py
|
ptyme/ptyme.py
|
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
print(time)
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
house = 0
time = time[0].split('m')
print(time)
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
time = time[0].split('s')
if time:
seconds = time[0]
else:
print("commands go here")
if __name__ == "__main__":
main()
|
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
# go through via characters
def parseArgsChar():
pass()
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
hours = 0
print(time)
print(hours)
time = time[1].split('m')
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
print(time)
print(minutes)
time = time[1].split('s')
if time:
seconds = time[0]
else:
seconds = 0
print(time)
print(seconds)
else:
print("commands go here")
if __name__ == "__main__":
main()
|
Update current parse, add alt parse frame
|
Update current parse, add alt parse frame
|
Python
|
mit
|
jabocg/ptyme
|
python
|
## Code Before:
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
print(time)
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
house = 0
time = time[0].split('m')
print(time)
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
time = time[0].split('s')
if time:
seconds = time[0]
else:
print("commands go here")
if __name__ == "__main__":
main()
## Instruction:
Update current parse, add alt parse frame
## Code After:
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
# go through via characters
def parseArgsChar():
pass()
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
hours = 0
print(time)
print(hours)
time = time[1].split('m')
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
print(time)
print(minutes)
time = time[1].split('s')
if time:
seconds = time[0]
else:
seconds = 0
print(time)
print(seconds)
else:
print("commands go here")
if __name__ == "__main__":
main()
|
...
print("Nope.")
print(argv)
# go through via characters
def parseArgsChar():
pass()
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
hours = 0
print(time)
print(hours)
time = time[1].split('m')
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
print(time)
print(minutes)
time = time[1].split('s')
if time:
seconds = time[0]
else:
seconds = 0
print(time)
print(seconds)
else:
print("commands go here")
...
|
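The character-based alternative that parseArgsChar() stubs out could look like the sketch below; it is independent of ptyme's eventual implementation and accepts any combination of hour, minute and second components such as '2h', '90m' or '1h30m20s':
def parse_duration(text):
    # Walk the string once, accumulating digits until a unit letter is seen.
    units = {'h': 'hours', 'm': 'minutes', 's': 'seconds'}
    result = {'hours': 0, 'minutes': 0, 'seconds': 0}
    digits = ''
    for ch in text:
        if ch.isdigit():
            digits += ch
        elif ch in units and digits:
            result[units[ch]] = int(digits)
            digits = ''
        else:
            raise ValueError('unexpected character %r in %r' % (ch, text))
    return result
assert parse_duration('1h30m20s') == {'hours': 1, 'minutes': 30, 'seconds': 20}
assert parse_duration('90m') == {'hours': 0, 'minutes': 90, 'seconds': 0}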
b973a1686f269044e670704b56c07ca79336c29c
|
mythril/laser/ethereum/strategy/basic.py
|
mythril/laser/ethereum/strategy/basic.py
|
class DepthFirstSearchStrategy:
def __init__(self, content, max_depth):
self.content = content
self.max_depth = max_depth
def __iter__(self):
return self
def __next__(self):
try:
global_state = self.content.pop(0)
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
|
class DepthFirstSearchStrategy:
"""
Implements a depth first search strategy
I.E. Follow one path to a leaf, and then continue to the next one
"""
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
return self
def __next__(self):
""" Picks the next state to execute """
try:
# This strategies assumes that new states are appended at the end of the work_list
# By taking the last element we effectively pick the "newest" states, which amounts to dfs
global_state = self.work_list.pop()
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
|
Add documentation and fix pop
|
Add documentation and fix pop
|
Python
|
mit
|
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
|
python
|
## Code Before:
class DepthFirstSearchStrategy:
def __init__(self, content, max_depth):
self.content = content
self.max_depth = max_depth
def __iter__(self):
return self
def __next__(self):
try:
global_state = self.content.pop(0)
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
## Instruction:
Add documentation and fix pop
## Code After:
class DepthFirstSearchStrategy:
"""
Implements a depth first search strategy
I.E. Follow one path to a leaf, and then continue to the next one
"""
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
return self
def __next__(self):
""" Picks the next state to execute """
try:
# This strategies assumes that new states are appended at the end of the work_list
# By taking the last element we effectively pick the "newest" states, which amounts to dfs
global_state = self.work_list.pop()
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
|
# ... existing code ...
class DepthFirstSearchStrategy:
"""
Implements a depth first search strategy
I.E. Follow one path to a leaf, and then continue to the next one
"""
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
# ... modified code ...
return self
def __next__(self):
""" Picks the next state to execute """
try:
# This strategies assumes that new states are appended at the end of the work_list
# By taking the last element we effectively pick the "newest" states, which amounts to dfs
global_state = self.work_list.pop()
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
# ... rest of the code ...
|
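The pop() fix is what makes the strategy depth-first: taking from the end of the work list is LIFO, while pop(0) is FIFO and gives breadth-first order instead. A small self-contained sketch of both orders over a toy state tree:
def traverse(children, start, depth_first=True):
    # children maps a state to the states appended after executing it,
    # new states always going to the end of the work list.
    work_list, visited = [start], []
    while work_list:
        state = work_list.pop() if depth_first else work_list.pop(0)
        visited.append(state)
        work_list.extend(children.get(state, []))
    return visited
tree = {'a': ['b', 'c'], 'b': ['d', 'e'], 'c': ['f']}
assert traverse(tree, 'a', depth_first=True) == ['a', 'c', 'f', 'b', 'e', 'd']
assert traverse(tree, 'a', depth_first=False) == ['a', 'b', 'c', 'd', 'e', 'f']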
7b15a9b510bce6a3866c0d3d7cd78c0c477cb69d
|
transformations/pig_latin/transformation.py
|
transformations/pig_latin/transformation.py
|
import piglatin
from interfaces.SentenceOperation import SentenceOperation
from tasks.TaskTypes import TaskType
class PigLatin(SentenceOperation):
tasks = [
TaskType.TEXT_CLASSIFICATION,
TaskType.TEXT_TO_TEXT_GENERATION,
TaskType.TEXT_TAGGING,
]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1):
super().__init__(seed, max_outputs=max_outputs)
def generate(self, sentence: str):
output_sentence = piglatin.translate(sentence.lower())
piglatin_sentence = []
for word in output_sentence.split():
piglatin_sentence.append(word.replace('-', ''))
piglatin_sentence = ' '.join(piglatin_sentence)
return [piglatin_sentence]
|
import piglatin
import random
from interfaces.SentenceOperation import SentenceOperation
from tasks.TaskTypes import TaskType
class PigLatin(SentenceOperation):
tasks = [
TaskType.TEXT_CLASSIFICATION,
TaskType.TEXT_TO_TEXT_GENERATION,
TaskType.TEXT_TAGGING,
]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1, replace_prob=1.0):
super().__init__(seed, max_outputs=max_outputs)
self.replace_prob = replace_prob
def generate(self, sentence: str):
piglatin_sentences = []
for _ in range(self.max_outputs):
piglatin_sentence = []
for word in sentence.lower().split():
if random.random() < self.replace_prob:
new_word = piglatin.translate(word)
else:
new_word = word
piglatin_sentence.append(new_word.replace('-', ''))
piglatin_sentence = ' '.join(piglatin_sentence)
piglatin_sentences.append(piglatin_sentence)
return piglatin_sentences
|
Add per-word replace probability, max outputs.
|
Add per-word replace probability, max outputs.
|
Python
|
mit
|
GEM-benchmark/NL-Augmenter
|
python
|
## Code Before:
import piglatin
from interfaces.SentenceOperation import SentenceOperation
from tasks.TaskTypes import TaskType
class PigLatin(SentenceOperation):
tasks = [
TaskType.TEXT_CLASSIFICATION,
TaskType.TEXT_TO_TEXT_GENERATION,
TaskType.TEXT_TAGGING,
]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1):
super().__init__(seed, max_outputs=max_outputs)
def generate(self, sentence: str):
output_sentence = piglatin.translate(sentence.lower())
piglatin_sentence = []
for word in output_sentence.split():
piglatin_sentence.append(word.replace('-', ''))
piglatin_sentence = ' '.join(piglatin_sentence)
return [piglatin_sentence]
## Instruction:
Add per-word replace probability, max outputs.
## Code After:
import piglatin
import random
from interfaces.SentenceOperation import SentenceOperation
from tasks.TaskTypes import TaskType
class PigLatin(SentenceOperation):
tasks = [
TaskType.TEXT_CLASSIFICATION,
TaskType.TEXT_TO_TEXT_GENERATION,
TaskType.TEXT_TAGGING,
]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1, replace_prob=1.0):
super().__init__(seed, max_outputs=max_outputs)
self.replace_prob = replace_prob
def generate(self, sentence: str):
piglatin_sentences = []
for _ in range(self.max_outputs):
piglatin_sentence = []
for word in sentence.lower().split():
if random.random() < self.replace_prob:
new_word = piglatin.translate(word)
else:
new_word = word
piglatin_sentence.append(new_word.replace('-', ''))
piglatin_sentence = ' '.join(piglatin_sentence)
piglatin_sentences.append(piglatin_sentence)
return piglatin_sentences
|
// ... existing code ...
import piglatin
import random
from interfaces.SentenceOperation import SentenceOperation
from tasks.TaskTypes import TaskType
// ... modified code ...
]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1, replace_prob=1.0):
super().__init__(seed, max_outputs=max_outputs)
self.replace_prob = replace_prob
def generate(self, sentence: str):
piglatin_sentences = []
for _ in range(self.max_outputs):
piglatin_sentence = []
for word in sentence.lower().split():
if random.random() < self.replace_prob:
new_word = piglatin.translate(word)
else:
new_word = word
piglatin_sentence.append(new_word.replace('-', ''))
piglatin_sentence = ' '.join(piglatin_sentence)
piglatin_sentences.append(piglatin_sentence)
return piglatin_sentences
// ... rest of the code ...
|
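A self-contained sketch of the same per-word scheme, where every word is independently transformed with probability replace_prob, using a naive pig-latin rule in place of the piglatin package; seeding a local Random keeps the sketch reproducible:
import random
def to_piglatin(word):
    # Naive rule: consonant-initial words move the first letter to the end plus 'ay',
    # vowel-initial words just get 'way'.
    if word[0] in 'aeiou':
        return word + 'way'
    return word[1:] + word[0] + 'ay'
def perturb(sentence, replace_prob=1.0, max_outputs=1, seed=0):
    rng = random.Random(seed)
    outputs = []
    for _ in range(max_outputs):
        words = [to_piglatin(w) if rng.random() < replace_prob else w
                 for w in sentence.lower().split()]
        outputs.append(' '.join(words))
    return outputs
print(perturb('Andrew likes plain bagels', replace_prob=0.5, max_outputs=2))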
df91e2840e84ecbcf74a46eb40c467dfe7d9a21e
|
setup.py
|
setup.py
|
"""Setup for pymd5 module and command-line script."""
from setuptools import setup
def readme():
"""Use text contained in README.rst as long description."""
with open('README.rst') as desc:
return desc.read()
setup(name='pymd5',
version='0.1',
description=('Recursively calculate and display MD5 file hashes '
'for all files rooted in a directory.'),
long_description=readme(),
url='https://github.com/richmilne/pymd5',
author='Richard Milne',
author_email='[email protected]',
license='MIT',
packages=['pymd5'],
include_package_data=True,
entry_points={
'console_scripts': ['pymd5=pymd5:_read_args']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
"""Setup for pymd5 module and command-line script."""
from setuptools import setup
def readme():
"""Use text contained in README.rst as long description."""
with open('README.rst') as desc:
return desc.read()
setup(name='pymd5',
version='0.1',
description=('Recursively calculate and display MD5 file hashes '
'for all files rooted in a directory.'),
long_description=readme(),
url='https://github.com/richmilne/pymd5/releases/tag/v0.1',
author='Richard Milne',
author_email='[email protected]',
license='MIT',
packages=['pymd5'],
include_package_data=True,
entry_points={
'console_scripts': ['pymd5=pymd5:_read_args']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Change url to point to tagged release
|
Change url to point to tagged release
|
Python
|
mit
|
richmilne/pymd5
|
python
|
## Code Before:
"""Setup for pymd5 module and command-line script."""
from setuptools import setup
def readme():
"""Use text contained in README.rst as long description."""
with open('README.rst') as desc:
return desc.read()
setup(name='pymd5',
version='0.1',
description=('Recursively calculate and display MD5 file hashes '
'for all files rooted in a directory.'),
long_description=readme(),
url='https://github.com/richmilne/pymd5',
author='Richard Milne',
author_email='[email protected]',
license='MIT',
packages=['pymd5'],
include_package_data=True,
entry_points={
'console_scripts': ['pymd5=pymd5:_read_args']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
## Instruction:
Change url to point to tagged release
## Code After:
"""Setup for pymd5 module and command-line script."""
from setuptools import setup
def readme():
"""Use text contained in README.rst as long description."""
with open('README.rst') as desc:
return desc.read()
setup(name='pymd5',
version='0.1',
description=('Recursively calculate and display MD5 file hashes '
'for all files rooted in a directory.'),
long_description=readme(),
url='https://github.com/richmilne/pymd5/releases/tag/v0.1',
author='Richard Milne',
author_email='[email protected]',
license='MIT',
packages=['pymd5'],
include_package_data=True,
entry_points={
'console_scripts': ['pymd5=pymd5:_read_args']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
# ... existing code ...
description=('Recursively calculate and display MD5 file hashes '
'for all files rooted in a directory.'),
long_description=readme(),
url='https://github.com/richmilne/pymd5/releases/tag/v0.1',
author='Richard Milne',
author_email='[email protected]',
license='MIT',
# ... rest of the code ...
|
0975c706c503d403810ddeba24ea14b5c9bdd133
|
d1_client_cli/src/setup.py
|
d1_client_cli/src/setup.py
|
from setuptools import setup, find_packages
import d1_client_cli
setup(
name='Python DataONE CLI',
version=d1_client_cli.__version__,
author='Roger Dahl, and the DataONE Development Team',
author_email='[email protected]',
url='http://dataone.org',
license='Apache License, Version 2.0',
description='A DataONE Command-line interface',
packages=find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires=[
'pyxb >= 1.1.3',
],
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
|
from setuptools import setup, find_packages
import d1_client_cli
setup(
name='Python DataONE CLI',
version=d1_client_cli.__version__,
author='Roger Dahl, and the DataONE Development Team',
author_email='[email protected]',
url='http://dataone.org',
license='Apache License, Version 2.0',
description='A DataONE Command-line interface',
packages = find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires = [
'pyxb >= 1.1.3',
],
dependency_links = [
'https://repository.dataone.org/software/python_products/d1_cli/Python_DataONE_Common-1.0.0c4-py2.6.egg',
'https://repository.dataone.org/software/python_products/d1_cli/Python_DataONE_Client_Library-1.0.0c4-py2.6.egg',
]
package_data = {
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
|
Add references to common and libclient, which will not be deployed to PyPi.
|
Add references to common and libclient, which will not be deployed to PyPi.
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
python
|
## Code Before:
from setuptools import setup, find_packages
import d1_client_cli
setup(
name='Python DataONE CLI',
version=d1_client_cli.__version__,
author='Roger Dahl, and the DataONE Development Team',
author_email='[email protected]',
url='http://dataone.org',
license='Apache License, Version 2.0',
description='A DataONE Command-line interface',
packages=find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires=[
'pyxb >= 1.1.3',
],
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
## Instruction:
Add references to common and libclient, which will not be deployed to PyPi.
## Code After:
from setuptools import setup, find_packages
import d1_client_cli
setup(
name='Python DataONE CLI',
version=d1_client_cli.__version__,
author='Roger Dahl, and the DataONE Development Team',
author_email='[email protected]',
url='http://dataone.org',
license='Apache License, Version 2.0',
description='A DataONE Command-line interface',
packages = find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires = [
'pyxb >= 1.1.3',
],
dependency_links = [
'https://repository.dataone.org/software/python_products/d1_cli/Python_DataONE_Common-1.0.0c4-py2.6.egg',
'https://repository.dataone.org/software/python_products/d1_cli/Python_DataONE_Client_Library-1.0.0c4-py2.6.egg',
]
package_data = {
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
|
# ... existing code ...
url='http://dataone.org',
license='Apache License, Version 2.0',
description='A DataONE Command-line interface',
packages = find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires = [
'pyxb >= 1.1.3',
],
dependency_links = [
'https://repository.dataone.org/software/python_products/d1_cli/Python_DataONE_Common-1.0.0c4-py2.6.egg',
'https://repository.dataone.org/software/python_products/d1_cli/Python_DataONE_Client_Library-1.0.0c4-py2.6.egg',
]
package_data = {
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
# ... rest of the code ...
|
adf46f5b90d04ce8e26701810b6c23bc230ddc37
|
nova/conf/consoleauth.py
|
nova/conf/consoleauth.py
|
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
Add an additional description for 'token_ttl'
|
Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6
|
Python
|
apache-2.0
|
openstack/nova,klmitch/nova,klmitch/nova,mahak/nova,phenoxim/nova,gooddata/openstack-nova,mikalstill/nova,gooddata/openstack-nova,mahak/nova,rahulunair/nova,klmitch/nova,klmitch/nova,rahulunair/nova,mikalstill/nova,gooddata/openstack-nova,mikalstill/nova,rahulunair/nova,openstack/nova,phenoxim/nova,openstack/nova,mahak/nova,gooddata/openstack-nova
|
python
|
## Code Before:
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token.
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
## Instruction:
Add an additional description for 'token_ttl'
The unit of 'token_ttl' is not clear
in the help text in nova/conf/consoleauth.py.
So add the unit (in seconds) in the help text.
TrivialFix
Change-Id: Id6506b7462c303223bac8586e664e187cb52abd6
## Code After:
from oslo_config import cfg
consoleauth_group = cfg.OptGroup(
name='consoleauth',
title='Console auth options')
consoleauth_opts = [
cfg.IntOpt('token_ttl',
default=600,
min=0,
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
considered expired. Expired tokens are then deleted.
""")
]
def register_opts(conf):
conf.register_group(consoleauth_group)
conf.register_opts(consoleauth_opts, group=consoleauth_group)
def list_opts():
return {consoleauth_group: consoleauth_opts}
|
# ... existing code ...
deprecated_name='console_token_ttl',
deprecated_group='DEFAULT',
help="""
The lifetime of a console auth token (in seconds).
A console auth token is used in authorizing console access for a user.
Once the auth token time to live count has elapsed, the token is
# ... rest of the code ...
|
312bb90415218398ddbe9250cfe7dbc4bb013e14
|
opal/core/lookuplists.py
|
opal/core/lookuplists.py
|
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
# class LookupList(models.Model):
# class Meta:
# abstract = True
class LookupList(models.Model):
name = models.CharField(max_length=255, unique=True)
synonyms = GenericRelation('opal.Synonym')
class Meta:
ordering = ['name']
abstract = True
def __unicode__(self):
return self.name
def to_dict(self, user):
return self.name
# def lookup_list(name, module=__name__):
# """
# Given the name of a lookup list, return the tuple of class_name, bases, attrs
# for the user to define the class
# """
# prefix = 'Lookup List: '
# class_name = name.capitalize() # TODO handle camelcase properly
# bases = (LookupList,)
# attrs = {
# 'name': models.CharField(max_length=255, unique=True),
# 'synonyms': generic.GenericRelation('opal.Synonym'),
# 'Meta': type('Meta', (object,), {'ordering': ['name'],
# 'verbose_name': prefix+name}),
# '__unicode__': lambda self: self.name,
# '__module__': module,
# }
# return class_name, bases, attrs
|
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
class LookupList(models.Model):
name = models.CharField(max_length=255, unique=True)
synonyms = GenericRelation('opal.Synonym')
class Meta:
ordering = ['name']
abstract = True
def __unicode__(self):
return self.name
def to_dict(self, user):
return self.name
|
Delete commented out old code.
|
Delete commented out old code.
|
Python
|
agpl-3.0
|
khchine5/opal,khchine5/opal,khchine5/opal
|
python
|
## Code Before:
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
# class LookupList(models.Model):
# class Meta:
# abstract = True
class LookupList(models.Model):
name = models.CharField(max_length=255, unique=True)
synonyms = GenericRelation('opal.Synonym')
class Meta:
ordering = ['name']
abstract = True
def __unicode__(self):
return self.name
def to_dict(self, user):
return self.name
# def lookup_list(name, module=__name__):
# """
# Given the name of a lookup list, return the tuple of class_name, bases, attrs
# for the user to define the class
# """
# prefix = 'Lookup List: '
# class_name = name.capitalize() # TODO handle camelcase properly
# bases = (LookupList,)
# attrs = {
# 'name': models.CharField(max_length=255, unique=True),
# 'synonyms': generic.GenericRelation('opal.Synonym'),
# 'Meta': type('Meta', (object,), {'ordering': ['name'],
# 'verbose_name': prefix+name}),
# '__unicode__': lambda self: self.name,
# '__module__': module,
# }
# return class_name, bases, attrs
## Instruction:
Delete commented out old code.
## Code After:
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
class LookupList(models.Model):
name = models.CharField(max_length=255, unique=True)
synonyms = GenericRelation('opal.Synonym')
class Meta:
ordering = ['name']
abstract = True
def __unicode__(self):
return self.name
def to_dict(self, user):
return self.name
|
# ... existing code ...
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
class LookupList(models.Model):
name = models.CharField(max_length=255, unique=True)
# ... modified code ...
def to_dict(self, user):
return self.name
# ... rest of the code ...
|
31f7608af768fafb3777a1abf6cc9a78bf8600ae
|
src/protocolsupport/protocol/transformer/middlepacketimpl/serverbound/play/v_1_4_1_5_1_6_1_7/BlockPlace.java
|
src/protocolsupport/protocol/transformer/middlepacketimpl/serverbound/play/v_1_4_1_5_1_6_1_7/BlockPlace.java
|
package protocolsupport.protocol.transformer.middlepacketimpl.serverbound.play.v_1_4_1_5_1_6_1_7;
import java.io.IOException;
import net.minecraft.server.v1_9_R1.BlockPosition;
import protocolsupport.protocol.PacketDataSerializer;
import protocolsupport.protocol.transformer.middlepacket.serverbound.play.MiddleBlockPlace;
public class BlockPlace extends MiddleBlockPlace {
@Override
public void readFromClientData(PacketDataSerializer serializer) throws IOException {
position = new BlockPosition(serializer.readInt(), serializer.readUnsignedByte(), serializer.readInt());
face = serializer.readUnsignedByte();
cX = serializer.readUnsignedByte();
cY = serializer.readUnsignedByte();
cZ = serializer.readUnsignedByte();
}
}
|
package protocolsupport.protocol.transformer.middlepacketimpl.serverbound.play.v_1_4_1_5_1_6_1_7;
import java.io.IOException;
import net.minecraft.server.v1_9_R1.BlockPosition;
import protocolsupport.protocol.PacketDataSerializer;
import protocolsupport.protocol.transformer.middlepacket.serverbound.play.MiddleBlockPlace;
public class BlockPlace extends MiddleBlockPlace {
@Override
public void readFromClientData(PacketDataSerializer serializer) throws IOException {
position = new BlockPosition(serializer.readInt(), serializer.readUnsignedByte(), serializer.readInt());
face = serializer.readUnsignedByte();
serializer.readItemStack();
cX = serializer.readUnsignedByte();
cY = serializer.readUnsignedByte();
cZ = serializer.readUnsignedByte();
}
}
|
Fix block place packet reading
|
Fix block place packet reading
|
Java
|
agpl-3.0
|
ridalarry/ProtocolSupport,ProtocolSupport/ProtocolSupport
|
java
|
## Code Before:
package protocolsupport.protocol.transformer.middlepacketimpl.serverbound.play.v_1_4_1_5_1_6_1_7;
import java.io.IOException;
import net.minecraft.server.v1_9_R1.BlockPosition;
import protocolsupport.protocol.PacketDataSerializer;
import protocolsupport.protocol.transformer.middlepacket.serverbound.play.MiddleBlockPlace;
public class BlockPlace extends MiddleBlockPlace {
@Override
public void readFromClientData(PacketDataSerializer serializer) throws IOException {
position = new BlockPosition(serializer.readInt(), serializer.readUnsignedByte(), serializer.readInt());
face = serializer.readUnsignedByte();
cX = serializer.readUnsignedByte();
cY = serializer.readUnsignedByte();
cZ = serializer.readUnsignedByte();
}
}
## Instruction:
Fix block place packet reading
## Code After:
package protocolsupport.protocol.transformer.middlepacketimpl.serverbound.play.v_1_4_1_5_1_6_1_7;
import java.io.IOException;
import net.minecraft.server.v1_9_R1.BlockPosition;
import protocolsupport.protocol.PacketDataSerializer;
import protocolsupport.protocol.transformer.middlepacket.serverbound.play.MiddleBlockPlace;
public class BlockPlace extends MiddleBlockPlace {
@Override
public void readFromClientData(PacketDataSerializer serializer) throws IOException {
position = new BlockPosition(serializer.readInt(), serializer.readUnsignedByte(), serializer.readInt());
face = serializer.readUnsignedByte();
serializer.readItemStack();
cX = serializer.readUnsignedByte();
cY = serializer.readUnsignedByte();
cZ = serializer.readUnsignedByte();
}
}
|
...
public void readFromClientData(PacketDataSerializer serializer) throws IOException {
position = new BlockPosition(serializer.readInt(), serializer.readUnsignedByte(), serializer.readInt());
face = serializer.readUnsignedByte();
serializer.readItemStack();
cX = serializer.readUnsignedByte();
cY = serializer.readUnsignedByte();
cZ = serializer.readUnsignedByte();
...
|
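Why the missing readItemStack() call broke the packet: in a binary stream every field must be consumed in order, or every later read is misaligned. A small Python struct sketch of the same failure mode (the layout here is invented for illustration, not the real 1.7 protocol):
import struct
# position (int, byte, int), face (byte), a 2-byte "item stack" stub, then cursor x/y/z bytes.
packet = struct.pack('>ibiBhBBB', 10, 64, -3, 1, 0x1234, 5, 6, 7)
def read_without_item(buf):
    # Skipping the item-stack field leaves the cursor bytes misaligned.
    x, y, z, face = struct.unpack_from('>ibiB', buf, 0)
    cx, cy, cz = struct.unpack_from('>BBB', buf, 10)
    return (x, y, z, face, cx, cy, cz)
def read_with_item(buf):
    x, y, z, face = struct.unpack_from('>ibiB', buf, 0)
    struct.unpack_from('>h', buf, 10)  # consume the item stack
    cx, cy, cz = struct.unpack_from('>BBB', buf, 12)
    return (x, y, z, face, cx, cy, cz)
print(read_without_item(packet))  # cursor values come out wrong
print(read_with_item(packet))     # (10, 64, -3, 1, 5, 6, 7)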
3510978fa5a61698ad439dd2a833fd74d6abd85e
|
src/main/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
|
src/main/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
|
package org.apache.hadoop.hive.ql.io.parquet.timestamp;
import org.apache.hadoop.hive.common.type.Timestamp;
import static java.lang.Math.toIntExact;
import static java.util.concurrent.TimeUnit.SECONDS;
public final class NanoTimeUtils
{
private NanoTimeUtils() {}
private static final int JULIAN_EPOCH_OFFSET_DAYS = 2_440_588;
public static NanoTime getNanoTime(Timestamp timestamp, @SuppressWarnings("unused") boolean ignored)
{
int epochDay = toIntExact(SECONDS.toDays(timestamp.toEpochSecond()));
int julianDay = JULIAN_EPOCH_OFFSET_DAYS + epochDay;
long timeOfDaySeconds = timestamp.toEpochSecond() % 86400;
long timeOfDayNanos = SECONDS.toNanos(timeOfDaySeconds) + timestamp.getNanos();
return new NanoTime(julianDay, timeOfDayNanos);
}
}
|
package org.apache.hadoop.hive.ql.io.parquet.timestamp;
import org.apache.hadoop.hive.common.type.Timestamp;
import static java.lang.Math.floorDiv;
import static java.lang.Math.floorMod;
import static java.lang.Math.toIntExact;
import static java.util.concurrent.TimeUnit.SECONDS;
public final class NanoTimeUtils
{
private NanoTimeUtils() {}
private static final int JULIAN_EPOCH_OFFSET_DAYS = 2_440_588;
private static final long SECONDS_PER_DAY = 86400L;
public static NanoTime getNanoTime(Timestamp timestamp, @SuppressWarnings("unused") boolean ignored)
{
long epochSeconds = timestamp.toEpochSecond();
int epochDay = toIntExact(floorDiv(epochSeconds, SECONDS_PER_DAY));
int julianDay = JULIAN_EPOCH_OFFSET_DAYS + epochDay;
long timeOfDaySeconds = floorMod(epochSeconds, SECONDS_PER_DAY);
long timeOfDayNanos = SECONDS.toNanos(timeOfDaySeconds) + timestamp.getNanos();
return new NanoTime(julianDay, timeOfDayNanos);
}
}
|
Fix Parquet encoding for timestamps before epoch
|
Fix Parquet encoding for timestamps before epoch
|
Java
|
apache-2.0
|
electrum/presto-hive-apache
|
java
|
## Code Before:
package org.apache.hadoop.hive.ql.io.parquet.timestamp;
import org.apache.hadoop.hive.common.type.Timestamp;
import static java.lang.Math.toIntExact;
import static java.util.concurrent.TimeUnit.SECONDS;
public final class NanoTimeUtils
{
private NanoTimeUtils() {}
private static final int JULIAN_EPOCH_OFFSET_DAYS = 2_440_588;
public static NanoTime getNanoTime(Timestamp timestamp, @SuppressWarnings("unused") boolean ignored)
{
int epochDay = toIntExact(SECONDS.toDays(timestamp.toEpochSecond()));
int julianDay = JULIAN_EPOCH_OFFSET_DAYS + epochDay;
long timeOfDaySeconds = timestamp.toEpochSecond() % 86400;
long timeOfDayNanos = SECONDS.toNanos(timeOfDaySeconds) + timestamp.getNanos();
return new NanoTime(julianDay, timeOfDayNanos);
}
}
## Instruction:
Fix Parquet encoding for timestamps before epoch
## Code After:
package org.apache.hadoop.hive.ql.io.parquet.timestamp;
import org.apache.hadoop.hive.common.type.Timestamp;
import static java.lang.Math.floorDiv;
import static java.lang.Math.floorMod;
import static java.lang.Math.toIntExact;
import static java.util.concurrent.TimeUnit.SECONDS;
public final class NanoTimeUtils
{
private NanoTimeUtils() {}
private static final int JULIAN_EPOCH_OFFSET_DAYS = 2_440_588;
private static final long SECONDS_PER_DAY = 86400L;
public static NanoTime getNanoTime(Timestamp timestamp, @SuppressWarnings("unused") boolean ignored)
{
long epochSeconds = timestamp.toEpochSecond();
int epochDay = toIntExact(floorDiv(epochSeconds, SECONDS_PER_DAY));
int julianDay = JULIAN_EPOCH_OFFSET_DAYS + epochDay;
long timeOfDaySeconds = floorMod(epochSeconds, SECONDS_PER_DAY);
long timeOfDayNanos = SECONDS.toNanos(timeOfDaySeconds) + timestamp.getNanos();
return new NanoTime(julianDay, timeOfDayNanos);
}
}
|
# ... existing code ...
import org.apache.hadoop.hive.common.type.Timestamp;
import static java.lang.Math.floorDiv;
import static java.lang.Math.floorMod;
import static java.lang.Math.toIntExact;
import static java.util.concurrent.TimeUnit.SECONDS;
# ... modified code ...
private NanoTimeUtils() {}
private static final int JULIAN_EPOCH_OFFSET_DAYS = 2_440_588;
private static final long SECONDS_PER_DAY = 86400L;
public static NanoTime getNanoTime(Timestamp timestamp, @SuppressWarnings("unused") boolean ignored)
{
long epochSeconds = timestamp.toEpochSecond();
int epochDay = toIntExact(floorDiv(epochSeconds, SECONDS_PER_DAY));
int julianDay = JULIAN_EPOCH_OFFSET_DAYS + epochDay;
long timeOfDaySeconds = floorMod(epochSeconds, SECONDS_PER_DAY);
long timeOfDayNanos = SECONDS.toNanos(timeOfDaySeconds) + timestamp.getNanos();
return new NanoTime(julianDay, timeOfDayNanos);
# ... rest of the code ...
|
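The fix only matters for timestamps before the epoch, where truncating division and the '%' remainder both go negative; floorDiv and floorMod instead roll the day backwards and keep the time of day in [0, 86400). Python's // and % already floor, so the corrected arithmetic can be checked directly:
import math
JULIAN_EPOCH_OFFSET_DAYS = 2440588
SECONDS_PER_DAY = 86400
def to_julian(epoch_seconds):
    epoch_day = epoch_seconds // SECONDS_PER_DAY       # floorDiv
    time_of_day = epoch_seconds % SECONDS_PER_DAY      # floorMod, always in [0, 86400)
    return JULIAN_EPOCH_OFFSET_DAYS + epoch_day, time_of_day
# 1970-01-01 00:00:01 UTC
assert to_julian(1) == (2440588, 1)
# 1969-12-31 23:59:59 UTC: one day earlier, one second before midnight
assert to_julian(-1) == (2440587, 86399)
# Truncating division and C-style remainder (the old behaviour) give 0 days and -1 seconds:
assert (math.trunc(-1 / 86400.0), math.fmod(-1, 86400)) == (0, -1.0)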
32d7a975850b4d59ffdc51d3f9916b078a3eeb01
|
src/revert/MainScene/notifications/WorldNotification.java
|
src/revert/MainScene/notifications/WorldNotification.java
|
package revert.MainScene.notifications;
/**
* Notification type of the game's scoring mechanics
*
* @author nhydock
*/
public class WorldNotification {
private static final String SCORE_FMT = "$\n%d";
private static final String TIME_FMT = "%02d";
public final String score;
public final String time;
public WorldNotification(final int score, final int time)
{
this.score = String.format(SCORE_FMT, score);
this.time = String.format(TIME_FMT, time);
}
}
|
package revert.MainScene.notifications;
/**
* Notification type of the game's scoring mechanics
*
* @author nhydock
*/
public class WorldNotification {
private static final String SCORE_FMT = "$\n%d";
private static final String TIME_FMT = "%02d";
public final String score;
public final String time;
public WorldNotification(final int score, final int time)
{
this.score = String.format(SCORE_FMT, score);
//make sure to convert time from ms to sec
this.time = String.format(TIME_FMT, (int)(time/1000));
}
}
|
Fix time display to be in seconds instead of ms
|
Fix time display to be in seconds instead of ms
|
Java
|
mit
|
nhydock/revert
|
java
|
## Code Before:
package revert.MainScene.notifications;
/**
* Notification type of the game's scoring mechanics
*
* @author nhydock
*/
public class WorldNotification {
private static final String SCORE_FMT = "$\n%d";
private static final String TIME_FMT = "%02d";
public final String score;
public final String time;
public WorldNotification(final int score, final int time)
{
this.score = String.format(SCORE_FMT, score);
this.time = String.format(TIME_FMT, time);
}
}
## Instruction:
Fix time display to be in seconds instead of ms
## Code After:
package revert.MainScene.notifications;
/**
* Notification type of the game's scoring mechanics
*
* @author nhydock
*/
public class WorldNotification {
private static final String SCORE_FMT = "$\n%d";
private static final String TIME_FMT = "%02d";
public final String score;
public final String time;
public WorldNotification(final int score, final int time)
{
this.score = String.format(SCORE_FMT, score);
//make sure to convert time from ms to sec
this.time = String.format(TIME_FMT, (int)(time/1000));
}
}
|
# ... existing code ...
public WorldNotification(final int score, final int time)
{
this.score = String.format(SCORE_FMT, score);
//make sure to convert time from ms to sec
this.time = String.format(TIME_FMT, (int)(time/1000));
}
}
# ... rest of the code ...
|
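In the record above, time arrives in milliseconds, and since it is an int in Java the division by 1000 is already integer division (the extra cast is harmless). The same truncation as a two-assertion Python check:
def format_time(ms):
    # Convert milliseconds to whole seconds, zero-padded to two digits.
    return '%02d' % (ms // 1000)
assert format_time(125999) == '125'
assert format_time(999) == '00'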
786ebc992ac09cd4b25e90ee2a243447e39c237f
|
director/accounts/forms.py
|
director/accounts/forms.py
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
"name", "logo", Submit("submit", "Update", css_class="button is-primary")
)
class Meta:
model = Account
fields = ("name", "logo")
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, HTML, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
from assets.thema import themes
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
Div(
HTML(
'<p class="title is-4">Identity</p>'
'<p class="subtitle is-5">Settings for your account\'s public profile.</p>'
),
"name",
"logo",
css_class="section",
),
Div(
HTML(
'<p class="title is-4">Content</p>'
'<p class="subtitle is-5">Settings affecting how content is served for your projects.</p>'
),
"theme",
"hosts",
css_class="section",
),
Submit("submit", "Update", css_class="button is-primary"),
)
class Meta:
model = Account
fields = ("name", "logo", "theme", "hosts")
widgets = {
"theme": forms.Select(choices=[(theme, theme) for theme in themes]),
"hosts": forms.TextInput(),
}
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
Add theme and hosts to settings
|
feat(Accounts): Add theme and hosts to settings
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
python
|
## Code Before:
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
"name", "logo", Submit("submit", "Update", css_class="button is-primary")
)
class Meta:
model = Account
fields = ("name", "logo")
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
## Instruction:
feat(Accounts): Add theme and hosts to settings
## Code After:
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, HTML, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
from assets.thema import themes
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
Div(
HTML(
'<p class="title is-4">Identity</p>'
'<p class="subtitle is-5">Settings for your account\'s public profile.</p>'
),
"name",
"logo",
css_class="section",
),
Div(
HTML(
'<p class="title is-4">Content</p>'
'<p class="subtitle is-5">Settings affecting how content is served for your projects.</p>'
),
"theme",
"hosts",
css_class="section",
),
Submit("submit", "Update", css_class="button is-primary"),
)
class Meta:
model = Account
fields = ("name", "logo", "theme", "hosts")
widgets = {
"theme": forms.Select(choices=[(theme, theme) for theme in themes]),
"hosts": forms.TextInput(),
}
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
# ... existing code ...
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, HTML, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
from assets.thema import themes
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
# ... modified code ...
fields = ("name", "logo")
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
Div(
HTML(
'<p class="title is-4">Identity</p>'
'<p class="subtitle is-5">Settings for your account\'s public profile.</p>'
),
"name",
"logo",
css_class="section",
),
Div(
HTML(
'<p class="title is-4">Content</p>'
'<p class="subtitle is-5">Settings affecting how content is served for your projects.</p>'
),
"theme",
"hosts",
css_class="section",
),
Submit("submit", "Update", css_class="button is-primary"),
)
class Meta:
model = Account
fields = ("name", "logo", "theme", "hosts")
widgets = {
"theme": forms.Select(choices=[(theme, theme) for theme in themes]),
"hosts": forms.TextInput(),
}
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
# ... rest of the code ...
|
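A minimal self-contained sketch of the widget wiring added above: the Select choices are built as (value, label) pairs from a list of theme names, while hosts stays a plain text input. The theme names below are invented stand-ins for assets.thema.themes, and settings.configure() is only there so the snippet runs outside a real Django project.

import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # bare-bones settings, enough for plain forms
    django.setup()

from django import forms

themes = ["stencila", "elife", "nature"]  # placeholder for assets.thema.themes

class AccountContentSketch(forms.Form):
    theme = forms.ChoiceField(choices=[(theme, theme) for theme in themes])
    hosts = forms.CharField(required=False, widget=forms.TextInput())

print(AccountContentSketch().fields["theme"].choices)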
58d5915d70c356c150fcdd0cdce940a831014449
|
android/AndroidScriptingEnvironment/src/com/google/ase/interpreter/sh/ShInterpreterProcess.java
|
android/AndroidScriptingEnvironment/src/com/google/ase/interpreter/sh/ShInterpreterProcess.java
|
/*
* Copyright (C) 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.ase.interpreter.sh;
import com.google.ase.AndroidFacade;
import com.google.ase.AndroidProxy;
import com.google.ase.interpreter.AbstractInterpreterProcess;
import com.google.ase.jsonrpc.JsonRpcServer;
public class ShInterpreterProcess extends AbstractInterpreterProcess {
private final AndroidProxy mAndroidProxy;
private final int mAndroidProxyPort;
public ShInterpreterProcess(AndroidFacade facade, String launchScript) {
super(facade, launchScript);
mAndroidProxy = new AndroidProxy(facade);
mAndroidProxyPort = new JsonRpcServer(mAndroidProxy).start();
buildEnvironment();
}
private void buildEnvironment() {
mEnvironment.put("AP_PORT", Integer.toString(mAndroidProxyPort));
}
}
|
/*
* Copyright (C) 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.ase.interpreter.sh;
import com.google.ase.AndroidFacade;
import com.google.ase.AndroidProxy;
import com.google.ase.interpreter.AbstractInterpreterProcess;
import com.google.ase.jsonrpc.JsonRpcServer;
public class ShInterpreterProcess extends AbstractInterpreterProcess {
private final AndroidProxy mAndroidProxy;
private final int mAndroidProxyPort;
public ShInterpreterProcess(AndroidFacade facade, String launchScript) {
super(facade, launchScript);
mAndroidProxy = new AndroidProxy(facade);
mAndroidProxyPort = new JsonRpcServer(mAndroidProxy).start();
buildEnvironment();
}
private void buildEnvironment() {
mEnvironment.put("AP_PORT", Integer.toString(mAndroidProxyPort));
}
@Override
protected void writeInterpreterCommand() {
if (mLaunchScript != null) {
print(SHELL_BIN + " " + mLaunchScript + "\n");
}
}
}
|
Add missing call to start shell scripts.
|
Add missing call to start shell scripts.
|
Java
|
apache-2.0
|
olapaola/olapaola-android-scripting,jeremiahmarks/sl4a,Lh4cKg/sl4a,giuliolunati/sl4a,cristiana214/cristianachavez214-cristianachavez,barbarubra/Don-t-know-What-i-m-doing.,cristiana214/cristianachavez214-cristianachavez,mSenyor/sl4a,kjc88/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,vlinhd11/vlinhd11-android-scripting,kerr-huang/SL4A,vlinhd11/vlinhd11-android-scripting,barbarubra/Don-t-know-What-i-m-doing.,matmutant/sl4a,kuri65536/sl4a,damonkohler/sl4a,valkjsaaa/sl4a,kjc88/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,liamgh/liamgreenhughes-sl4a-tf101,kjc88/sl4a,kuri65536/sl4a,valkjsaaa/sl4a,wskplho/sl4a,miguelpalacio/sl4a,jeremiahmarks/sl4a,olapaola/olapaola-android-scripting,jeremiahmarks/sl4a,kevinmel2000/sl4a,Lh4cKg/sl4a,olapaola/olapaola-android-scripting,giuliolunati/sl4a,jeremiahmarks/sl4a,matmutant/sl4a,kdheepak89/sl4a,kdheepak89/sl4a,olapaola/olapaola-android-scripting,pforret/sl4a,liamgh/liamgreenhughes-sl4a-tf101,damonkohler/sl4a,mSenyor/sl4a,miguelpalacio/sl4a,kdheepak89/sl4a,cristiana214/cristianachavez214-cristianachavez,wskplho/sl4a,matmutant/sl4a,vlachoudis/sl4a,barbarubra/Don-t-know-What-i-m-doing.,cristiana214/cristianachavez214-cristianachavez,mSenyor/sl4a,mSenyor/sl4a,wskplho/sl4a,Lh4cKg/sl4a,jeremiahmarks/sl4a,thejeshgn/sl4a,yqm/sl4a,kevinmel2000/sl4a,cristiana214/cristianachavez214-cristianachavez,thejeshgn/sl4a,yqm/sl4a,liamgh/liamgreenhughes-sl4a-tf101,kerr-huang/SL4A,valkjsaaa/sl4a,valkjsaaa/sl4a,jeremiahmarks/sl4a,mSenyor/sl4a,thejeshgn/sl4a,kerr-huang/SL4A,SatoshiNXSimudrone/sl4a-damon-clone,cristiana214/cristianachavez214-cristianachavez,matmutant/sl4a,liamgh/liamgreenhughes-sl4a-tf101,kerr-huang/SL4A,giuliolunati/sl4a,vlachoudis/sl4a,Lh4cKg/sl4a,barbarubra/Don-t-know-What-i-m-doing.,gonboy/sl4a,liamgh/liamgreenhughes-sl4a-tf101,giuliolunati/sl4a,matmutant/sl4a,yqm/sl4a,kjc88/sl4a,liamgh/liamgreenhughes-sl4a-tf101,kjc88/sl4a,kevinmel2000/sl4a,kevinmel2000/sl4a,vlachoudis/sl4a,cristiana214/cristianachavez214-cristianachavez,Lh4cKg/sl4a,yqm/sl4a,vlinhd11/vlinhd11-android-scripting,vlachoudis/sl4a,damonkohler/sl4a,kuri65536/sl4a,liamgh/liamgreenhughes-sl4a-tf101,kjc88/sl4a,matmutant/sl4a,gonboy/sl4a,jeremiahmarks/sl4a,kjc88/sl4a,wskplho/sl4a,olapaola/olapaola-android-scripting,olapaola/olapaola-android-scripting,Lh4cKg/sl4a,gonboy/sl4a,kdheepak89/sl4a,pforret/sl4a,kevinmel2000/sl4a,vlachoudis/sl4a,damonkohler/sl4a,valkjsaaa/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,mSenyor/sl4a,vlachoudis/sl4a,Lh4cKg/sl4a,kerr-huang/SL4A,giuliolunati/sl4a,mSenyor/sl4a,damonkohler/sl4a,kevinmel2000/sl4a,kevinmel2000/sl4a,wskplho/sl4a,jeremiahmarks/sl4a,miguelpalacio/sl4a,pforret/sl4a,wskplho/sl4a,miguelpalacio/sl4a,giuliolunati/sl4a,valkjsaaa/sl4a,vlinhd11/vlinhd11-android-scripting,valkjsaaa/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,barbarubra/Don-t-know-What-i-m-doing.,yqm/sl4a,yqm/sl4a,vlinhd11/vlinhd11-android-scripting,vlachoudis/sl4a,wskplho/sl4a,gonboy/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,kevinmel2000/sl4a,valkjsaaa/sl4a,kerr-huang/SL4A,mSenyor/sl4a,matmutant/sl4a,vlinhd11/vlinhd11-android-scripting,gonboy/sl4a,kuri65536/sl4a,damonkohler/sl4a,Lh4cKg/sl4a,giuliolunati/sl4a,vlachoudis/sl4a,kdheepak89/sl4a,pforret/sl4a,jeremiahmarks/sl4a,vlinhd11/vlinhd11-android-scripting,liamgh/liamgreenhughes-sl4a-tf101,mSenyor/sl4a,jeremiahmarks/sl4a,yqm/sl4a,kevinmel2000/sl4a,wskplho/sl4a,barbarubra/Don-t-know-What-i-m-doing.,matmutant/sl4a,barbarubra/Don-t-know-What-i-m-doing.,vlinhd11/vlinhd11-android-scripting,olapaola/olapaola-android-scripting,kerr-huang/SL4A,kjc88/sl4a,liamgh/liamgreenhughes-s
l4a-tf101,thejeshgn/sl4a,vlinhd11/vlinhd11-android-scripting,valkjsaaa/sl4a,yqm/sl4a,matmutant/sl4a,cristiana214/cristianachavez214-cristianachavez,gonboy/sl4a,damonkohler/sl4a,pforret/sl4a,thejeshgn/sl4a,yqm/sl4a,liamgh/liamgreenhughes-sl4a-tf101,gonboy/sl4a,kerr-huang/SL4A,SatoshiNXSimudrone/sl4a-damon-clone,cristiana214/cristianachavez214-cristianachavez,liamgh/liamgreenhughes-sl4a-tf101,mSenyor/sl4a,yqm/sl4a,damonkohler/sl4a,vlachoudis/sl4a,jeremiahmarks/sl4a,miguelpalacio/sl4a,kjc88/sl4a,kjc88/sl4a,vlachoudis/sl4a,thejeshgn/sl4a,cristiana214/cristianachavez214-cristianachavez,miguelpalacio/sl4a,gonboy/sl4a,Lh4cKg/sl4a,barbarubra/Don-t-know-What-i-m-doing.,thejeshgn/sl4a,vlinhd11/vlinhd11-android-scripting,barbarubra/Don-t-know-What-i-m-doing.,gonboy/sl4a,valkjsaaa/sl4a,matmutant/sl4a,gonboy/sl4a,pforret/sl4a,kjc88/sl4a,kdheepak89/sl4a,valkjsaaa/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,kdheepak89/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,vlachoudis/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,olapaola/olapaola-android-scripting,wskplho/sl4a,olapaola/olapaola-android-scripting,wskplho/sl4a,miguelpalacio/sl4a,olapaola/olapaola-android-scripting,Lh4cKg/sl4a,kuri65536/sl4a,pforret/sl4a,damonkohler/sl4a,damonkohler/sl4a,mSenyor/sl4a,Lh4cKg/sl4a,olapaola/olapaola-android-scripting,cristiana214/cristianachavez214-cristianachavez,barbarubra/Don-t-know-What-i-m-doing.,vlinhd11/vlinhd11-android-scripting,damonkohler/sl4a,kevinmel2000/sl4a,wskplho/sl4a,matmutant/sl4a,kuri65536/sl4a,SatoshiNXSimudrone/sl4a-damon-clone,kuri65536/sl4a,yqm/sl4a,gonboy/sl4a,kevinmel2000/sl4a,kerr-huang/SL4A,barbarubra/Don-t-know-What-i-m-doing.
|
java
|
## Code Before:
/*
* Copyright (C) 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.ase.interpreter.sh;
import com.google.ase.AndroidFacade;
import com.google.ase.AndroidProxy;
import com.google.ase.interpreter.AbstractInterpreterProcess;
import com.google.ase.jsonrpc.JsonRpcServer;
public class ShInterpreterProcess extends AbstractInterpreterProcess {
private final AndroidProxy mAndroidProxy;
private final int mAndroidProxyPort;
public ShInterpreterProcess(AndroidFacade facade, String launchScript) {
super(facade, launchScript);
mAndroidProxy = new AndroidProxy(facade);
mAndroidProxyPort = new JsonRpcServer(mAndroidProxy).start();
buildEnvironment();
}
private void buildEnvironment() {
mEnvironment.put("AP_PORT", Integer.toString(mAndroidProxyPort));
}
}
## Instruction:
Add missing call to start shell scripts.
## Code After:
/*
* Copyright (C) 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.ase.interpreter.sh;
import com.google.ase.AndroidFacade;
import com.google.ase.AndroidProxy;
import com.google.ase.interpreter.AbstractInterpreterProcess;
import com.google.ase.jsonrpc.JsonRpcServer;
public class ShInterpreterProcess extends AbstractInterpreterProcess {
private final AndroidProxy mAndroidProxy;
private final int mAndroidProxyPort;
public ShInterpreterProcess(AndroidFacade facade, String launchScript) {
super(facade, launchScript);
mAndroidProxy = new AndroidProxy(facade);
mAndroidProxyPort = new JsonRpcServer(mAndroidProxy).start();
buildEnvironment();
}
private void buildEnvironment() {
mEnvironment.put("AP_PORT", Integer.toString(mAndroidProxyPort));
}
@Override
protected void writeInterpreterCommand() {
if (mLaunchScript != null) {
print(SHELL_BIN + " " + mLaunchScript + "\n");
}
}
}
|
...
private void buildEnvironment() {
mEnvironment.put("AP_PORT", Integer.toString(mAndroidProxyPort));
}
@Override
protected void writeInterpreterCommand() {
if (mLaunchScript != null) {
print(SHELL_BIN + " " + mLaunchScript + "\n");
}
}
}
...
|
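The new writeInterpreterCommand() simply pushes "<shell> <launch script>\n" down the interpreter's stdin, with AP_PORT placed in the environment (presumably so the launched script can reach the AndroidProxy's JSON-RPC port). A rough Python equivalent of that pattern on a POSIX host; the port value and command here are illustrative, not ASE's actual plumbing:

import subprocess

env = {"AP_PORT": "45001"}  # hypothetical RPC port, mirroring buildEnvironment()
proc = subprocess.Popen(["/bin/sh"], stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE, env=env, text=True)
# Equivalent of writeInterpreterCommand(): hand the spawned shell a command line to run.
out, _ = proc.communicate('echo "launching with AP_PORT=$AP_PORT"\n')
print(out, end="")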
b9a1a47361df09c4ef9b717afd6358aff982ecc5
|
setup.py
|
setup.py
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='[email protected]',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
|
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='[email protected]',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
Prepare to use marina as a package
|
Prepare to use marina as a package
|
Python
|
apache-2.0
|
inetprocess/docker-lamp,inetprocess/docker-lamp,edyan/stakkr,inetprocess/docker-lamp,edyan/stakkr,edyan/stakkr
|
python
|
## Code Before:
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='[email protected]',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='''
[console_scripts]
marina=cli:main
docker-clean=docker_clean:main
{}
'''.format(get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'requests>=2.11.0,<2.12',
'docker-compose',
'configobj'
]
)
## Instruction:
Prepare to use marina as a package
## Code After:
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
version='2.0',
description='A stack based on docker to run PHP Applications',
url='http://github.com/inetprocess/marina',
author='Emmanuel Dyan',
author_email='[email protected]',
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
|
...
from marina.plugins import get_plugins_configuration
from setuptools import setup, find_packages
def get_console_scripts():
"""Guess if we use marina as a package or if it has been cloned"""
scripts = "[console_scripts]\n"
try:
from marina import cli, docker_clean
scripts += "marina=marina.cli:main\n"
scripts += "docker-clean=marina.docker_clean:main\n"
except Exception:
scripts += "marina=cli:main\n"
scripts += "docker-clean=docker_clean:main\n"
return scripts
setup(
name='Marina',
...
license='Apache 2.0',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
py_modules=['marina'],
entry_points='{}{}'.format(get_console_scripts(), get_plugins_configuration()),
install_requires=[
'clint',
'click', 'click-plugins',
'docker-compose',
'configobj',
'requests>=2.11.0,<2.12'
]
)
...
|
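The refactor above amounts to building the [console_scripts] section at install time: probe whether the marina package is importable, prefix the module paths accordingly, then append whatever get_plugins_configuration() returns. A standalone sketch of that idea, using importlib instead of a bare import and a made-up plugin line:

import importlib.util

def console_scripts() -> str:
    try:
        packaged = importlib.util.find_spec("marina.cli") is not None
    except ModuleNotFoundError:  # the marina package is absent entirely
        packaged = False
    prefix = "marina." if packaged else ""
    return ("[console_scripts]\n"
            f"marina={prefix}cli:main\n"
            f"docker-clean={prefix}docker_clean:main\n")

plugin_lines = "marina-backup=marina_backup.cli:main\n"  # hypothetical plugin entry
print(console_scripts() + plugin_lines)  # the text setuptools would parse as entry_points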
a44efeac4f3c36a319b90ac3c71490259d26810e
|
ktor-client/ktor-client-tests/jvm/src/io/ktor/client/tests/utils/TestServer.kt
|
ktor-client/ktor-client-tests/jvm/src/io/ktor/client/tests/utils/TestServer.kt
|
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.tests.utils
import ch.qos.logback.classic.*
import io.ktor.client.tests.utils.tests.*
import io.ktor.server.engine.*
import io.ktor.server.jetty.*
import org.slf4j.*
import java.io.*
import java.util.concurrent.*
private const val DEFAULT_PORT: Int = 8080
private const val HTTP_PROXY_PORT: Int = 8082
internal fun startServer(): Closeable {
val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME) as ch.qos.logback.classic.Logger
logger.level = Level.WARN
val server = embeddedServer(Jetty, DEFAULT_PORT) {
tests()
benchmarks()
}.start()
val proxyServer = TestTcpServer(HTTP_PROXY_PORT, ::proxyHandler)
return Closeable {
proxyServer.close()
server.stop(0L, 0L, TimeUnit.MILLISECONDS)
}
}
/**
* Start server for tests.
*/
fun main() {
startServer()
}
|
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.tests.utils
import ch.qos.logback.classic.*
import io.ktor.client.tests.utils.tests.*
import io.ktor.server.engine.*
import io.ktor.server.jetty.*
import io.ktor.server.netty.*
import org.slf4j.*
import java.io.*
import java.util.concurrent.*
private const val DEFAULT_PORT: Int = 8080
private const val HTTP_PROXY_PORT: Int = 8082
internal fun startServer(): Closeable {
val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME) as ch.qos.logback.classic.Logger
logger.level = Level.WARN
val server = embeddedServer(Netty, DEFAULT_PORT) {
tests()
benchmarks()
}.start()
val proxyServer = TestTcpServer(HTTP_PROXY_PORT, ::proxyHandler)
return Closeable {
proxyServer.close()
server.stop(0L, 0L, TimeUnit.MILLISECONDS)
}
}
/**
* Start server for tests.
*/
fun main() {
startServer()
}
|
Use Netty as a default test server instead of Jetty (some issues like TCP half-close could be reproduced only on Netty).
|
Use Netty as a default test server instead of Jetty (some issues like TCP half-close could be reproduced only on Netty).
|
Kotlin
|
apache-2.0
|
ktorio/ktor,ktorio/ktor,ktorio/ktor,ktorio/ktor
|
kotlin
|
## Code Before:
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.tests.utils
import ch.qos.logback.classic.*
import io.ktor.client.tests.utils.tests.*
import io.ktor.server.engine.*
import io.ktor.server.jetty.*
import org.slf4j.*
import java.io.*
import java.util.concurrent.*
private const val DEFAULT_PORT: Int = 8080
private const val HTTP_PROXY_PORT: Int = 8082
internal fun startServer(): Closeable {
val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME) as ch.qos.logback.classic.Logger
logger.level = Level.WARN
val server = embeddedServer(Jetty, DEFAULT_PORT) {
tests()
benchmarks()
}.start()
val proxyServer = TestTcpServer(HTTP_PROXY_PORT, ::proxyHandler)
return Closeable {
proxyServer.close()
server.stop(0L, 0L, TimeUnit.MILLISECONDS)
}
}
/**
* Start server for tests.
*/
fun main() {
startServer()
}
## Instruction:
Use Netty as a default test server instead of Jetty (some issues like TCP half-close could be reproduced only on Netty).
## Code After:
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.tests.utils
import ch.qos.logback.classic.*
import io.ktor.client.tests.utils.tests.*
import io.ktor.server.engine.*
import io.ktor.server.jetty.*
import io.ktor.server.netty.*
import org.slf4j.*
import java.io.*
import java.util.concurrent.*
private const val DEFAULT_PORT: Int = 8080
private const val HTTP_PROXY_PORT: Int = 8082
internal fun startServer(): Closeable {
val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME) as ch.qos.logback.classic.Logger
logger.level = Level.WARN
val server = embeddedServer(Netty, DEFAULT_PORT) {
tests()
benchmarks()
}.start()
val proxyServer = TestTcpServer(HTTP_PROXY_PORT, ::proxyHandler)
return Closeable {
proxyServer.close()
server.stop(0L, 0L, TimeUnit.MILLISECONDS)
}
}
/**
* Start server for tests.
*/
fun main() {
startServer()
}
|
...
import io.ktor.client.tests.utils.tests.*
import io.ktor.server.engine.*
import io.ktor.server.jetty.*
import io.ktor.server.netty.*
import org.slf4j.*
import java.io.*
import java.util.concurrent.*
...
val logger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME) as ch.qos.logback.classic.Logger
logger.level = Level.WARN
val server = embeddedServer(Netty, DEFAULT_PORT) {
tests()
benchmarks()
}.start()
...
|
1d1a64c8a98d98a243307dd58ec3874f0369ce8f
|
tests/ex12_tests.py
|
tests/ex12_tests.py
|
from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
print(test_histogram)
assert_equal(test_histogram, '*\n**\n***\n')
|
from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
# assert_equal(test_histogram, '*\n**\n***\n')
|
Drop ex12 tests for now.
|
Drop ex12 tests for now.
|
Python
|
mit
|
gravyboat/python-exercises
|
python
|
## Code Before:
from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
print(test_histogram)
assert_equal(test_histogram, '*\n**\n***\n')
## Instruction:
Drop ex12 tests for now.
## Code After:
from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
# assert_equal(test_histogram, '*\n**\n***\n')
|
// ... existing code ...
'''
test_histogram = ex12.histogram([1, 2, 3])
# assert_equal(test_histogram, '*\n**\n***\n')
// ... rest of the code ...
|
d7df867b2a5e7c8f5255d9e7627999c3e2132e9c
|
example/tests/test_utils.py
|
example/tests/test_utils.py
|
from rest_framework_json_api import utils
from ..serializers import EntrySerializer
from ..tests import TestBase
class GetRelatedResourceTests(TestBase):
"""
Ensure the `get_related_resource_type` function returns correct types.
"""
def test_reverse_relation(self):
"""
Ensure reverse foreign keys have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['comments']
self.assertEqual(utils.get_related_resource_type(field), 'comments')
|
from rest_framework_json_api import utils
from ..serializers import EntrySerializer
from ..tests import TestBase
class GetRelatedResourceTests(TestBase):
"""
Ensure the `get_related_resource_type` function returns correct types.
"""
def test_reverse_relation(self):
"""
Ensure reverse foreign keys have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['comments']
self.assertEqual(utils.get_related_resource_type(field), 'comments')
def test_m2m_relation(self):
"""
Ensure m2ms have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['authors']
self.assertEqual(utils.get_related_resource_type(field), 'authors')
|
Add failing test for m2m too.
|
Add failing test for m2m too.
|
Python
|
bsd-2-clause
|
abdulhaq-e/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,Instawork/django-rest-framework-json-api,django-json-api/rest_framework_ember,django-json-api/django-rest-framework-json-api
|
python
|
## Code Before:
from rest_framework_json_api import utils
from ..serializers import EntrySerializer
from ..tests import TestBase
class GetRelatedResourceTests(TestBase):
"""
Ensure the `get_related_resource_type` function returns correct types.
"""
def test_reverse_relation(self):
"""
Ensure reverse foreign keys have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['comments']
self.assertEqual(utils.get_related_resource_type(field), 'comments')
## Instruction:
Add failing test for m2m too.
## Code After:
from rest_framework_json_api import utils
from ..serializers import EntrySerializer
from ..tests import TestBase
class GetRelatedResourceTests(TestBase):
"""
Ensure the `get_related_resource_type` function returns correct types.
"""
def test_reverse_relation(self):
"""
Ensure reverse foreign keys have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['comments']
self.assertEqual(utils.get_related_resource_type(field), 'comments')
def test_m2m_relation(self):
"""
Ensure m2ms have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['authors']
self.assertEqual(utils.get_related_resource_type(field), 'authors')
|
...
field = serializer.fields['comments']
self.assertEqual(utils.get_related_resource_type(field), 'comments')
def test_m2m_relation(self):
"""
Ensure m2ms have their types identified correctly.
"""
serializer = EntrySerializer()
field = serializer.fields['authors']
self.assertEqual(utils.get_related_resource_type(field), 'authors')
...
|
7c74017bc0d76ecb34e3fab44767290f51d98a09
|
humbug/test_settings.py
|
humbug/test_settings.py
|
from settings import *
DATABASES["default"] = {"NAME": "zephyr/tests/zephyrdb.test",
"ENGINE": "django.db.backends.sqlite3",
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
|
from settings import *
DATABASES["default"] = {"NAME": "zephyr/tests/zephyrdb.test",
"ENGINE": "django.db.backends.sqlite3",
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
# Decrease the get_updates timeout to 1 second.
# This allows CasperJS to proceed quickly to the next test step.
POLL_TIMEOUT = 1000
|
Decrease get_updates timeout for client test suite
|
Decrease get_updates timeout for client test suite
Fixes #475.
(imported from commit d8f908c55f2e519541e5383a742edbf23183539c)
|
Python
|
apache-2.0
|
ryanbackman/zulip,christi3k/zulip,dnmfarrell/zulip,littledogboy/zulip,LAndreas/zulip,timabbott/zulip,hayderimran7/zulip,xuxiao/zulip,deer-hope/zulip,zhaoweigg/zulip,m1ssou/zulip,wdaher/zulip,punchagan/zulip,lfranchi/zulip,kokoar/zulip,voidException/zulip,brainwane/zulip,blaze225/zulip,akuseru/zulip,JPJPJPOPOP/zulip,hayderimran7/zulip,MayB/zulip,ipernet/zulip,AZtheAsian/zulip,gigawhitlocks/zulip,bssrdf/zulip,bitemyapp/zulip,ashwinirudrappa/zulip,proliming/zulip,LeeRisk/zulip,littledogboy/zulip,JanzTam/zulip,developerfm/zulip,pradiptad/zulip,dotcool/zulip,alliejones/zulip,hj3938/zulip,Juanvulcano/zulip,qq1012803704/zulip,yocome/zulip,sharmaeklavya2/zulip,AZtheAsian/zulip,tdr130/zulip,vaidap/zulip,wangdeshui/zulip,sup95/zulip,blaze225/zulip,dnmfarrell/zulip,stamhe/zulip,Drooids/zulip,rht/zulip,suxinde2009/zulip,aakash-cr7/zulip,nicholasbs/zulip,qq1012803704/zulip,bowlofstew/zulip,aakash-cr7/zulip,mdavid/zulip,xuanhan863/zulip,wangdeshui/zulip,bastianh/zulip,hafeez3000/zulip,bssrdf/zulip,umkay/zulip,jphilipsen05/zulip,zofuthan/zulip,proliming/zulip,mansilladev/zulip,codeKonami/zulip,karamcnair/zulip,wavelets/zulip,paxapy/zulip,Cheppers/zulip,kaiyuanheshang/zulip,jerryge/zulip,hustlzp/zulip,nicholasbs/zulip,reyha/zulip,andersk/zulip,huangkebo/zulip,mahim97/zulip,pradiptad/zulip,jessedhillon/zulip,littledogboy/zulip,moria/zulip,DazWorrall/zulip,zofuthan/zulip,jimmy54/zulip,umkay/zulip,zorojean/zulip,xuxiao/zulip,sharmaeklavya2/zulip,stamhe/zulip,themass/zulip,Diptanshu8/zulip,andersk/zulip,technicalpickles/zulip,dhcrzf/zulip,Jianchun1/zulip,tommyip/zulip,ipernet/zulip,praveenaki/zulip,levixie/zulip,developerfm/zulip,praveenaki/zulip,punchagan/zulip,joshisa/zulip,saitodisse/zulip,hengqujushi/zulip,Batterfii/zulip,codeKonami/zulip,technicalpickles/zulip,hayderimran7/zulip,voidException/zulip,amanharitsh123/zulip,rishig/zulip,ryansnowboarder/zulip,LeeRisk/zulip,bastianh/zulip,Batterfii/zulip,ikasumiwt/zulip,ashwinirudrappa/zulip,Drooids/zulip,Cheppers/zulip,tommyip/zulip,Vallher/zulip,johnnygaddarr/zulip,wweiradio/zulip,jrowan/zulip,RobotCaleb/zulip,jphilipsen05/zulip,PhilSk/zulip,firstblade/zulip,suxinde2009/zulip,gkotian/zulip,punchagan/zulip,aakash-cr7/zulip,amyliu345/zulip,dwrpayne/zulip,moria/zulip,dxq-git/zulip,LAndreas/zulip,bssrdf/zulip,guiquanz/zulip,Gabriel0402/zulip,Batterfii/zulip,so0k/zulip,RobotCaleb/zulip,huangkebo/zulip,stamhe/zulip,AZtheAsian/zulip,noroot/zulip,xuxiao/zulip,dxq-git/zulip,vaidap/zulip,ufosky-server/zulip,rht/zulip,babbage/zulip,kaiyuanheshang/zulip,dawran6/zulip,aliceriot/zulip,dattatreya303/zulip,themass/zulip,noroot/zulip,timabbott/zulip,hj3938/zulip,RobotCaleb/zulip,brockwhittaker/zulip,Qgap/zulip,shrikrishnaholla/zulip,voidException/zulip,bowlofstew/zulip,verma-varsha/zulip,jeffcao/zulip,bitemyapp/zulip,praveenaki/zulip,LAndreas/zulip,proliming/zulip,aakash-cr7/zulip,jackrzhang/zulip,brainwane/zulip,Suninus/zulip,jainayush975/zulip,christi3k/zulip,Galexrt/zulip,gkotian/zulip,praveenaki/zulip,hackerkid/zulip,Cheppers/zulip,vakila/zulip,cosmicAsymmetry/zulip,niftynei/zulip,noroot/zulip,nicholasbs/zulip,suxinde2009/zulip,akuseru/zulip,noroot/zulip,natanovia/zulip,shubhamdhama/zulip,fw1121/zulip,eeshangarg/zulip,aakash-cr7/zulip,wangdeshui/zulip,grave-w-grave/zulip,ahmadassaf/zulip,andersk/zulip,vabs22/zulip,zofuthan/zulip,ashwinirudrappa/zulip,alliejones/zulip,Frouk/zulip,wdaher/zulip,pradiptad/zulip,bluesea/zulip,so0k/zulip,vikas-parashar/zulip,Cheppers/zulip,easyfmxu/zulip,bastianh/zulip,ufosky-server/zulip,mohsenSy/zulip,souravbadami/zulip,Batterfii/zulip,amanhar
itsh123/zulip,hackerkid/zulip,sharmaeklavya2/zulip,rishig/zulip,karamcnair/zulip,luyifan/zulip,jainayush975/zulip,hackerkid/zulip,glovebx/zulip,littledogboy/zulip,he15his/zulip,zwily/zulip,ApsOps/zulip,fw1121/zulip,showell/zulip,ufosky-server/zulip,dwrpayne/zulip,hengqujushi/zulip,bluesea/zulip,ikasumiwt/zulip,codeKonami/zulip,Juanvulcano/zulip,dotcool/zulip,tommyip/zulip,umkay/zulip,sup95/zulip,showell/zulip,mdavid/zulip,sonali0901/zulip,thomasboyt/zulip,natanovia/zulip,babbage/zulip,mahim97/zulip,JPJPJPOPOP/zulip,KJin99/zulip,bowlofstew/zulip,synicalsyntax/zulip,dawran6/zulip,timabbott/zulip,zofuthan/zulip,Batterfii/zulip,souravbadami/zulip,Drooids/zulip,wavelets/zulip,verma-varsha/zulip,Galexrt/zulip,atomic-labs/zulip,LeeRisk/zulip,so0k/zulip,swinghu/zulip,armooo/zulip,glovebx/zulip,xuanhan863/zulip,zachallaun/zulip,littledogboy/zulip,jeffcao/zulip,aliceriot/zulip,jphilipsen05/zulip,Frouk/zulip,grave-w-grave/zulip,karamcnair/zulip,bluesea/zulip,schatt/zulip,deer-hope/zulip,showell/zulip,armooo/zulip,hayderimran7/zulip,tiansiyuan/zulip,codeKonami/zulip,vakila/zulip,nicholasbs/zulip,firstblade/zulip,eeshangarg/zulip,johnny9/zulip,luyifan/zulip,zorojean/zulip,avastu/zulip,jackrzhang/zulip,ipernet/zulip,ashwinirudrappa/zulip,yuvipanda/zulip,tommyip/zulip,paxapy/zulip,synicalsyntax/zulip,technicalpickles/zulip,shubhamdhama/zulip,ryansnowboarder/zulip,seapasulli/zulip,grave-w-grave/zulip,jonesgithub/zulip,armooo/zulip,bssrdf/zulip,wavelets/zulip,sonali0901/zulip,brockwhittaker/zulip,jphilipsen05/zulip,MayB/zulip,hj3938/zulip,ryansnowboarder/zulip,wangdeshui/zulip,eastlhu/zulip,Diptanshu8/zulip,he15his/zulip,ipernet/zulip,kaiyuanheshang/zulip,JanzTam/zulip,glovebx/zulip,aliceriot/zulip,vikas-parashar/zulip,thomasboyt/zulip,susansls/zulip,swinghu/zulip,wweiradio/zulip,EasonYi/zulip,rht/zulip,seapasulli/zulip,SmartPeople/zulip,Qgap/zulip,umkay/zulip,tdr130/zulip,dxq-git/zulip,MariaFaBella85/zulip,Vallher/zulip,jackrzhang/zulip,saitodisse/zulip,levixie/zulip,udxxabp/zulip,willingc/zulip,babbage/zulip,yuvipanda/zulip,ryanbackman/zulip,Galexrt/zulip,aps-sids/zulip,suxinde2009/zulip,babbage/zulip,peiwei/zulip,dwrpayne/zulip,ahmadassaf/zulip,krtkmj/zulip,jonesgithub/zulip,wavelets/zulip,dattatreya303/zulip,dotcool/zulip,eastlhu/zulip,showell/zulip,bssrdf/zulip,jeffcao/zulip,ufosky-server/zulip,paxapy/zulip,ufosky-server/zulip,sonali0901/zulip,sharmaeklavya2/zulip,dawran6/zulip,jessedhillon/zulip,verma-varsha/zulip,dhcrzf/zulip,shrikrishnaholla/zulip,zorojean/zulip,vikas-parashar/zulip,Gabriel0402/zulip,atomic-labs/zulip,Jianchun1/zulip,guiquanz/zulip,rishig/zulip,ryansnowboarder/zulip,kou/zulip,ryanbackman/zulip,MayB/zulip,jainayush975/zulip,jackrzhang/zulip,eastlhu/zulip,hengqujushi/zulip,sonali0901/zulip,synicalsyntax/zulip,cosmicAsymmetry/zulip,saitodisse/zulip,Gabriel0402/zulip,firstblade/zulip,christi3k/zulip,suxinde2009/zulip,umkay/zulip,zhaoweigg/zulip,Suninus/zulip,shaunstanislaus/zulip,zulip/zulip,technicalpickles/zulip,Juanvulcano/zulip,kokoar/zulip,suxinde2009/zulip,fw1121/zulip,dxq-git/zulip,dattatreya303/zulip,ikasumiwt/zulip,moria/zulip,shubhamdhama/zulip,krtkmj/zulip,zacps/zulip,andersk/zulip,Jianchun1/zulip,jessedhillon/zulip,jonesgithub/zulip,cosmicAsymmetry/zulip,ahmadassaf/zulip,MariaFaBella85/zulip,proliming/zulip,calvinleenyc/zulip,deer-hope/zulip,LAndreas/zulip,gkotian/zulip,dnmfarrell/zulip,sharmaeklavya2/zulip,JPJPJPOPOP/zulip,Jianchun1/zulip,arpitpanwar/zulip,hayderimran7/zulip,Gabriel0402/zulip,EasonYi/zulip,zacps/zulip,Juanvulcano/zulip,codeKonami/zulip,esander91/zulip,DazWo
rrall/zulip,souravbadami/zulip,kou/zulip,jimmy54/zulip,aliceriot/zulip,jessedhillon/zulip,itnihao/zulip,calvinleenyc/zulip,technicalpickles/zulip,wweiradio/zulip,jimmy54/zulip,joshisa/zulip,firstblade/zulip,avastu/zulip,tbutter/zulip,LeeRisk/zulip,reyha/zulip,KJin99/zulip,vabs22/zulip,j831/zulip,gigawhitlocks/zulip,paxapy/zulip,brainwane/zulip,tbutter/zulip,vakila/zulip,akuseru/zulip,jerryge/zulip,noroot/zulip,easyfmxu/zulip,so0k/zulip,ahmadassaf/zulip,dhcrzf/zulip,PhilSk/zulip,johnny9/zulip,wweiradio/zulip,ericzhou2008/zulip,joshisa/zulip,shubhamdhama/zulip,dxq-git/zulip,itnihao/zulip,arpitpanwar/zulip,tommyip/zulip,kaiyuanheshang/zulip,babbage/zulip,yuvipanda/zulip,avastu/zulip,SmartPeople/zulip,Vallher/zulip,ApsOps/zulip,mansilladev/zulip,KingxBanana/zulip,zhaoweigg/zulip,j831/zulip,isht3/zulip,EasonYi/zulip,tdr130/zulip,Diptanshu8/zulip,niftynei/zulip,ipernet/zulip,Batterfii/zulip,adnanh/zulip,adnanh/zulip,bitemyapp/zulip,dawran6/zulip,KingxBanana/zulip,MariaFaBella85/zulip,peiwei/zulip,praveenaki/zulip,dxq-git/zulip,vakila/zulip,xuxiao/zulip,dhcrzf/zulip,zwily/zulip,hengqujushi/zulip,tbutter/zulip,showell/zulip,bluesea/zulip,SmartPeople/zulip,andersk/zulip,shubhamdhama/zulip,dhcrzf/zulip,PaulPetring/zulip,lfranchi/zulip,bastianh/zulip,mohsenSy/zulip,calvinleenyc/zulip,aps-sids/zulip,luyifan/zulip,technicalpickles/zulip,alliejones/zulip,jrowan/zulip,krtkmj/zulip,DazWorrall/zulip,ikasumiwt/zulip,tiansiyuan/zulip,Drooids/zulip,krtkmj/zulip,yuvipanda/zulip,kou/zulip,eeshangarg/zulip,shrikrishnaholla/zulip,schatt/zulip,he15his/zulip,mdavid/zulip,zachallaun/zulip,gkotian/zulip,he15his/zulip,shaunstanislaus/zulip,Frouk/zulip,rht/zulip,lfranchi/zulip,zulip/zulip,xuxiao/zulip,littledogboy/zulip,Suninus/zulip,hustlzp/zulip,showell/zulip,brainwane/zulip,dotcool/zulip,itnihao/zulip,hj3938/zulip,MayB/zulip,tdr130/zulip,DazWorrall/zulip,pradiptad/zulip,zhaoweigg/zulip,aps-sids/zulip,punchagan/zulip,AZtheAsian/zulip,dotcool/zulip,willingc/zulip,rishig/zulip,umkay/zulip,ahmadassaf/zulip,Galexrt/zulip,hj3938/zulip,tbutter/zulip,fw1121/zulip,noroot/zulip,susansls/zulip,bowlofstew/zulip,jerryge/zulip,firstblade/zulip,xuxiao/zulip,amallia/zulip,krtkmj/zulip,bastianh/zulip,m1ssou/zulip,noroot/zulip,hackerkid/zulip,ericzhou2008/zulip,LAndreas/zulip,karamcnair/zulip,Cheppers/zulip,peguin40/zulip,tbutter/zulip,samatdav/zulip,udxxabp/zulip,armooo/zulip,mahim97/zulip,vakila/zulip,verma-varsha/zulip,susansls/zulip,grave-w-grave/zulip,isht3/zulip,andersk/zulip,jackrzhang/zulip,ryanbackman/zulip,babbage/zulip,timabbott/zulip,vabs22/zulip,KingxBanana/zulip,luyifan/zulip,dhcrzf/zulip,mohsenSy/zulip,so0k/zulip,KJin99/zulip,shrikrishnaholla/zulip,jimmy54/zulip,atomic-labs/zulip,zachallaun/zulip,mansilladev/zulip,zhaoweigg/zulip,mohsenSy/zulip,karamcnair/zulip,kaiyuanheshang/zulip,codeKonami/zulip,shrikrishnaholla/zulip,calvinleenyc/zulip,wweiradio/zulip,sup95/zulip,PaulPetring/zulip,vakila/zulip,peiwei/zulip,dnmfarrell/zulip,ufosky-server/zulip,jimmy54/zulip,grave-w-grave/zulip,itnihao/zulip,ikasumiwt/zulip,gigawhitlocks/zulip,johnny9/zulip,sonali0901/zulip,johnny9/zulip,willingc/zulip,littledogboy/zulip,easyfmxu/zulip,eastlhu/zulip,PhilSk/zulip,joyhchen/zulip,m1ssou/zulip,synicalsyntax/zulip,swinghu/zulip,glovebx/zulip,yuvipanda/zulip,Suninus/zulip,joyhchen/zulip,codeKonami/zulip,tbutter/zulip,nicholasbs/zulip,susansls/zulip,jimmy54/zulip,cosmicAsymmetry/zulip,shrikrishnaholla/zulip,pradiptad/zulip,grave-w-grave/zulip,mohsenSy/zulip,he15his/zulip,aps-sids/zulip,samatdav/zulip,eastlhu/zulip,vikas-parashar/zulip,reyha/
zulip,KJin99/zulip,zulip/zulip,ufosky-server/zulip,ApsOps/zulip,zacps/zulip,levixie/zulip,LeeRisk/zulip,fw1121/zulip,Gabriel0402/zulip,kou/zulip,Galexrt/zulip,zachallaun/zulip,aps-sids/zulip,tommyip/zulip,vaidap/zulip,themass/zulip,firstblade/zulip,jeffcao/zulip,natanovia/zulip,arpith/zulip,joyhchen/zulip,shaunstanislaus/zulip,bssrdf/zulip,guiquanz/zulip,alliejones/zulip,rht/zulip,sup95/zulip,so0k/zulip,ashwinirudrappa/zulip,arpitpanwar/zulip,brockwhittaker/zulip,joshisa/zulip,j831/zulip,jainayush975/zulip,samatdav/zulip,swinghu/zulip,yocome/zulip,saitodisse/zulip,reyha/zulip,joshisa/zulip,natanovia/zulip,zofuthan/zulip,yocome/zulip,DazWorrall/zulip,udxxabp/zulip,wdaher/zulip,shaunstanislaus/zulip,dotcool/zulip,willingc/zulip,proliming/zulip,JPJPJPOPOP/zulip,gigawhitlocks/zulip,isht3/zulip,swinghu/zulip,showell/zulip,tiansiyuan/zulip,stamhe/zulip,themass/zulip,Frouk/zulip,jonesgithub/zulip,JanzTam/zulip,eastlhu/zulip,avastu/zulip,glovebx/zulip,KJin99/zulip,eeshangarg/zulip,arpitpanwar/zulip,qq1012803704/zulip,johnnygaddarr/zulip,MariaFaBella85/zulip,thomasboyt/zulip,tbutter/zulip,huangkebo/zulip,souravbadami/zulip,qq1012803704/zulip,mansilladev/zulip,shrikrishnaholla/zulip,JanzTam/zulip,ashwinirudrappa/zulip,johnny9/zulip,dawran6/zulip,moria/zulip,jackrzhang/zulip,shaunstanislaus/zulip,he15his/zulip,punchagan/zulip,ericzhou2008/zulip,zachallaun/zulip,saitodisse/zulip,udxxabp/zulip,developerfm/zulip,arpith/zulip,JanzTam/zulip,arpitpanwar/zulip,zwily/zulip,gigawhitlocks/zulip,m1ssou/zulip,kokoar/zulip,RobotCaleb/zulip,amyliu345/zulip,dattatreya303/zulip,johnny9/zulip,Jianchun1/zulip,MayB/zulip,suxinde2009/zulip,m1ssou/zulip,Juanvulcano/zulip,rht/zulip,ericzhou2008/zulip,aliceriot/zulip,jessedhillon/zulip,JPJPJPOPOP/zulip,moria/zulip,johnny9/zulip,andersk/zulip,kokoar/zulip,voidException/zulip,stamhe/zulip,gkotian/zulip,LeeRisk/zulip,lfranchi/zulip,hustlzp/zulip,rishig/zulip,rishig/zulip,timabbott/zulip,natanovia/zulip,pradiptad/zulip,willingc/zulip,amyliu345/zulip,deer-hope/zulip,johnnygaddarr/zulip,developerfm/zulip,alliejones/zulip,DazWorrall/zulip,vikas-parashar/zulip,esander91/zulip,lfranchi/zulip,luyifan/zulip,akuseru/zulip,umkay/zulip,amanharitsh123/zulip,johnnygaddarr/zulip,peguin40/zulip,adnanh/zulip,jphilipsen05/zulip,moria/zulip,levixie/zulip,wangdeshui/zulip,DazWorrall/zulip,joyhchen/zulip,mdavid/zulip,KingxBanana/zulip,yocome/zulip,Gabriel0402/zulip,MayB/zulip,natanovia/zulip,huangkebo/zulip,TigorC/zulip,saitodisse/zulip,tiansiyuan/zulip,gigawhitlocks/zulip,lfranchi/zulip,LAndreas/zulip,rishig/zulip,dwrpayne/zulip,j831/zulip,Vallher/zulip,cosmicAsymmetry/zulip,isht3/zulip,brainwane/zulip,dattatreya303/zulip,sharmaeklavya2/zulip,synicalsyntax/zulip,kokoar/zulip,aliceriot/zulip,zwily/zulip,niftynei/zulip,guiquanz/zulip,SmartPeople/zulip,schatt/zulip,zulip/zulip,deer-hope/zulip,akuseru/zulip,vikas-parashar/zulip,PaulPetring/zulip,dhcrzf/zulip,huangkebo/zulip,xuanhan863/zulip,deer-hope/zulip,Cheppers/zulip,dwrpayne/zulip,Suninus/zulip,udxxabp/zulip,Vallher/zulip,ericzhou2008/zulip,fw1121/zulip,avastu/zulip,amyliu345/zulip,vaidap/zulip,kokoar/zulip,amallia/zulip,seapasulli/zulip,bitemyapp/zulip,Juanvulcano/zulip,atomic-labs/zulip,isht3/zulip,vabs22/zulip,vakila/zulip,zofuthan/zulip,Diptanshu8/zulip,kou/zulip,hj3938/zulip,arpith/zulip,akuseru/zulip,blaze225/zulip,themass/zulip,jainayush975/zulip,zachallaun/zulip,hafeez3000/zulip,kou/zulip,amanharitsh123/zulip,Drooids/zulip,jerryge/zulip,arpith/zulip,technicalpickles/zulip,fw1121/zulip,karamcnair/zulip,wavelets/zulip,nicholasbs/zulip,jef
fcao/zulip,wdaher/zulip,huangkebo/zulip,LeeRisk/zulip,voidException/zulip,Frouk/zulip,bowlofstew/zulip,swinghu/zulip,ryanbackman/zulip,luyifan/zulip,sonali0901/zulip,zorojean/zulip,TigorC/zulip,peguin40/zulip,cosmicAsymmetry/zulip,MariaFaBella85/zulip,zulip/zulip,developerfm/zulip,christi3k/zulip,AZtheAsian/zulip,kou/zulip,armooo/zulip,zwily/zulip,jimmy54/zulip,bastianh/zulip,Galexrt/zulip,Suninus/zulip,mdavid/zulip,j831/zulip,he15his/zulip,wweiradio/zulip,JPJPJPOPOP/zulip,jonesgithub/zulip,qq1012803704/zulip,udxxabp/zulip,amallia/zulip,amallia/zulip,shubhamdhama/zulip,hafeez3000/zulip,m1ssou/zulip,kaiyuanheshang/zulip,johnnygaddarr/zulip,bowlofstew/zulip,arpith/zulip,isht3/zulip,m1ssou/zulip,voidException/zulip,timabbott/zulip,peguin40/zulip,Qgap/zulip,amallia/zulip,thomasboyt/zulip,PaulPetring/zulip,niftynei/zulip,ipernet/zulip,itnihao/zulip,reyha/zulip,lfranchi/zulip,RobotCaleb/zulip,verma-varsha/zulip,wdaher/zulip,Qgap/zulip,calvinleenyc/zulip,Frouk/zulip,eeshangarg/zulip,zacps/zulip,niftynei/zulip,arpitpanwar/zulip,hengqujushi/zulip,souravbadami/zulip,LAndreas/zulip,blaze225/zulip,Drooids/zulip,hayderimran7/zulip,zwily/zulip,itnihao/zulip,hayderimran7/zulip,adnanh/zulip,mohsenSy/zulip,jerryge/zulip,esander91/zulip,zhaoweigg/zulip,mahim97/zulip,zachallaun/zulip,EasonYi/zulip,bssrdf/zulip,joshisa/zulip,eeshangarg/zulip,arpith/zulip,ryansnowboarder/zulip,reyha/zulip,dnmfarrell/zulip,dawran6/zulip,hengqujushi/zulip,easyfmxu/zulip,ryansnowboarder/zulip,luyifan/zulip,guiquanz/zulip,christi3k/zulip,karamcnair/zulip,zwily/zulip,zulip/zulip,easyfmxu/zulip,hackerkid/zulip,tiansiyuan/zulip,atomic-labs/zulip,arpitpanwar/zulip,gigawhitlocks/zulip,bitemyapp/zulip,vabs22/zulip,MayB/zulip,ApsOps/zulip,Qgap/zulip,zorojean/zulip,hafeez3000/zulip,Cheppers/zulip,esander91/zulip,pradiptad/zulip,Vallher/zulip,jonesgithub/zulip,PhilSk/zulip,vaidap/zulip,Suninus/zulip,esander91/zulip,tiansiyuan/zulip,hafeez3000/zulip,adnanh/zulip,glovebx/zulip,peguin40/zulip,synicalsyntax/zulip,xuanhan863/zulip,seapasulli/zulip,hafeez3000/zulip,brockwhittaker/zulip,tommyip/zulip,dnmfarrell/zulip,johnnygaddarr/zulip,Galexrt/zulip,joyhchen/zulip,levixie/zulip,natanovia/zulip,TigorC/zulip,armooo/zulip,JanzTam/zulip,krtkmj/zulip,Drooids/zulip,jrowan/zulip,EasonYi/zulip,brockwhittaker/zulip,brainwane/zulip,alliejones/zulip,levixie/zulip,verma-varsha/zulip,gkotian/zulip,thomasboyt/zulip,proliming/zulip,udxxabp/zulip,paxapy/zulip,armooo/zulip,amanharitsh123/zulip,akuseru/zulip,jackrzhang/zulip,seapasulli/zulip,seapasulli/zulip,niftynei/zulip,Jianchun1/zulip,bluesea/zulip,joshisa/zulip,zofuthan/zulip,xuanhan863/zulip,jessedhillon/zulip,synicalsyntax/zulip,atomic-labs/zulip,shaunstanislaus/zulip,hustlzp/zulip,deer-hope/zulip,easyfmxu/zulip,PhilSk/zulip,RobotCaleb/zulip,praveenaki/zulip,proliming/zulip,jerryge/zulip,jerryge/zulip,schatt/zulip,KingxBanana/zulip,bluesea/zulip,kokoar/zulip,gkotian/zulip,MariaFaBella85/zulip,punchagan/zulip,amyliu345/zulip,TigorC/zulip,huangkebo/zulip,seapasulli/zulip,babbage/zulip,moria/zulip,Vallher/zulip,jainayush975/zulip,mdavid/zulip,saitodisse/zulip,ikasumiwt/zulip,hengqujushi/zulip,zacps/zulip,RobotCaleb/zulip,tdr130/zulip,AZtheAsian/zulip,shubhamdhama/zulip,SmartPeople/zulip,wavelets/zulip,swinghu/zulip,dnmfarrell/zulip,dotcool/zulip,voidException/zulip,zulip/zulip,timabbott/zulip,PaulPetring/zulip,schatt/zulip,guiquanz/zulip,bitemyapp/zulip,ericzhou2008/zulip,wangdeshui/zulip,dxq-git/zulip,souravbadami/zulip,mansilladev/zulip,kaiyuanheshang/zulip,JanzTam/zulip,EasonYi/zulip,developerfm/zulip,thoma
sboyt/zulip,zhaoweigg/zulip,Diptanshu8/zulip,joyhchen/zulip,zorojean/zulip,peiwei/zulip,ApsOps/zulip,hustlzp/zulip,glovebx/zulip,zorojean/zulip,hj3938/zulip,ikasumiwt/zulip,KingxBanana/zulip,easyfmxu/zulip,rht/zulip,tdr130/zulip,dwrpayne/zulip,eastlhu/zulip,tdr130/zulip,nicholasbs/zulip,PaulPetring/zulip,ahmadassaf/zulip,willingc/zulip,guiquanz/zulip,aakash-cr7/zulip,wangdeshui/zulip,stamhe/zulip,itnihao/zulip,atomic-labs/zulip,ashwinirudrappa/zulip,jphilipsen05/zulip,levixie/zulip,wdaher/zulip,dattatreya303/zulip,hafeez3000/zulip,hackerkid/zulip,adnanh/zulip,jrowan/zulip,EasonYi/zulip,thomasboyt/zulip,susansls/zulip,Diptanshu8/zulip,sup95/zulip,mdavid/zulip,mahim97/zulip,ApsOps/zulip,mansilladev/zulip,wavelets/zulip,themass/zulip,amallia/zulip,avastu/zulip,j831/zulip,qq1012803704/zulip,mansilladev/zulip,wweiradio/zulip,peiwei/zulip,yocome/zulip,eeshangarg/zulip,susansls/zulip,punchagan/zulip,samatdav/zulip,jeffcao/zulip,KJin99/zulip,alliejones/zulip,esander91/zulip,calvinleenyc/zulip,jonesgithub/zulip,peiwei/zulip,bluesea/zulip,paxapy/zulip,bastianh/zulip,ApsOps/zulip,vabs22/zulip,schatt/zulip,samatdav/zulip,jrowan/zulip,jeffcao/zulip,Gabriel0402/zulip,PhilSk/zulip,TigorC/zulip,praveenaki/zulip,dwrpayne/zulip,amyliu345/zulip,amallia/zulip,johnnygaddarr/zulip,shaunstanislaus/zulip,ahmadassaf/zulip,ericzhou2008/zulip,esander91/zulip,jrowan/zulip,sup95/zulip,Qgap/zulip,Frouk/zulip,themass/zulip,blaze225/zulip,avastu/zulip,bowlofstew/zulip,aps-sids/zulip,vaidap/zulip,ipernet/zulip,xuanhan863/zulip,KJin99/zulip,stamhe/zulip,yocome/zulip,TigorC/zulip,aps-sids/zulip,firstblade/zulip,hackerkid/zulip,ryanbackman/zulip,hustlzp/zulip,SmartPeople/zulip,blaze225/zulip,mahim97/zulip,willingc/zulip,developerfm/zulip,yuvipanda/zulip,samatdav/zulip,MariaFaBella85/zulip,ryansnowboarder/zulip,christi3k/zulip,brainwane/zulip,yocome/zulip,Batterfii/zulip,tiansiyuan/zulip,xuanhan863/zulip,bitemyapp/zulip,adnanh/zulip,amanharitsh123/zulip,jessedhillon/zulip,hustlzp/zulip,xuxiao/zulip,schatt/zulip,brockwhittaker/zulip,wdaher/zulip,peguin40/zulip,so0k/zulip,qq1012803704/zulip,peiwei/zulip,yuvipanda/zulip,zacps/zulip,PaulPetring/zulip,krtkmj/zulip,Qgap/zulip,aliceriot/zulip
|
python
|
## Code Before:
from settings import *
DATABASES["default"] = {"NAME": "zephyr/tests/zephyrdb.test",
"ENGINE": "django.db.backends.sqlite3",
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
## Instruction:
Decrease get_updates timeout for client test suite
Fixes #475.
(imported from commit d8f908c55f2e519541e5383a742edbf23183539c)
## Code After:
from settings import *
DATABASES["default"] = {"NAME": "zephyr/tests/zephyrdb.test",
"ENGINE": "django.db.backends.sqlite3",
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
# Decrease the get_updates timeout to 1 second.
# This allows CasperJS to proceed quickly to the next test step.
POLL_TIMEOUT = 1000
|
# ... existing code ...
"OPTIONS": { "timeout": 20, },}
TORNADO_SERVER = 'http://localhost:9983'
# Decrease the get_updates timeout to 1 second.
# This allows CasperJS to proceed quickly to the next test step.
POLL_TIMEOUT = 1000
# ... rest of the code ...
|
684ac5e6e6011581d5abcb42a7c0e54742f20606
|
Arduino/IMUstream_WifiUDP_iot33/read_UDP_JSON_IMU.py
|
Arduino/IMUstream_WifiUDP_iot33/read_UDP_JSON_IMU.py
|
import socket, traceback
import time
import json
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
print(message)
json.loads(message.decode("utf-8"))
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
|
import socket, traceback
import time
import json
import numpy as np
from scipy.spatial.transform import Rotation as R
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
# Place IMU x-axis into wind going direction when launching script
is_init_done = False
wind_yaw = 0
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
#print(message)
msg = json.loads(message.decode("utf-8"))
if is_init_done==False:
wind_yaw = msg["Yaw"]
is_init_done = True
msg['Yaw'] = msg['Yaw']-wind_yaw
print(msg)
ypr = [msg['Yaw'], msg['Pitch'], msg['Roll']]
seq = 'ZYX' # small letters from intrinsic rotations
r = R.from_euler(seq, ypr, degrees=True)
# Compute coordinates in NED (could be useful to compare position with GPS position for example)
line_length = 10
base_to_kite = [0, 0, line_length]
base_to_kite_in_NED = r.apply(base_to_kite)
# Express kite coordinates as great roll, great pitch and small yaw angles
grpy=r.as_euler(seq="XYZ")
print(grpy*180/np.pi)
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
|
Add computations of great roll, pitch and small yaw angle (kite angles)
|
Add computations of great roll, pitch and small yaw angle (kite angles)
|
Python
|
mit
|
baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite
|
python
|
## Code Before:
import socket, traceback
import time
import json
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
print(message)
json.loads(message.decode("utf-8"))
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
## Instruction:
Add computations of great roll, pitch and small yaw angle (kite angles)
## Code After:
import socket, traceback
import time
import json
import numpy as np
from scipy.spatial.transform import Rotation as R
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
# Place IMU x-axis into wind going direction when launching script
is_init_done = False
wind_yaw = 0
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
#print(message)
msg = json.loads(message.decode("utf-8"))
if is_init_done==False:
wind_yaw = msg["Yaw"]
is_init_done = True
msg['Yaw'] = msg['Yaw']-wind_yaw
print(msg)
ypr = [msg['Yaw'], msg['Pitch'], msg['Roll']]
seq = 'ZYX' # small letters from intrinsic rotations
r = R.from_euler(seq, ypr, degrees=True)
# Compute coordinates in NED (could be useful to compare position with GPS position for example)
line_length = 10
base_to_kite = [0, 0, line_length]
base_to_kite_in_NED = r.apply(base_to_kite)
# Express kite coordinates as great roll, great pitch and small yaw angles
grpy=r.as_euler(seq="XYZ")
print(grpy*180/np.pi)
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
|
// ... existing code ...
import socket, traceback
import time
import json
import numpy as np
from scipy.spatial.transform import Rotation as R
host = ''
port = 2390
// ... modified code ...
filein = open('saveUDP.txt', 'w')
t0 = time.time()
# Place IMU x-axis into wind going direction when launching script
is_init_done = False
wind_yaw = 0
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
#print(message)
msg = json.loads(message.decode("utf-8"))
if is_init_done==False:
wind_yaw = msg["Yaw"]
is_init_done = True
msg['Yaw'] = msg['Yaw']-wind_yaw
print(msg)
ypr = [msg['Yaw'], msg['Pitch'], msg['Roll']]
seq = 'ZYX' # small letters from intrinsic rotations
r = R.from_euler(seq, ypr, degrees=True)
# Compute coordinates in NED (could be useful to compare position with GPS position for example)
line_length = 10
base_to_kite = [0, 0, line_length]
base_to_kite_in_NED = r.apply(base_to_kite)
# Express kite coordinates as great roll, great pitch and small yaw angles
grpy=r.as_euler(seq="XYZ")
print(grpy*180/np.pi)
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
// ... rest of the code ...
|
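The record above converts each streamed yaw/pitch/roll attitude into a rotation, projects the tether line into NED, and re-expresses the attitude as "great roll, great pitch, small yaw". Below is a minimal standalone sketch of that pipeline, assuming SciPy and NumPy are installed; the sample angles and the 10 m line length are invented stand-ins, not values from the record's data stream.

```python
import numpy as np
from scipy.spatial.transform import Rotation as R

# Hypothetical sample attitude in degrees (stand-in for one decoded UDP message).
yaw, pitch, roll = 30.0, 10.0, 5.0

# Intrinsic Z-Y'-X'' rotation; in SciPy, uppercase letters in the sequence
# string denote intrinsic rotations, lowercase letters extrinsic ones.
r = R.from_euler('ZYX', [yaw, pitch, roll], degrees=True)

# Project an assumed 10 m line along the body z-axis into NED coordinates.
base_to_kite_in_ned = r.apply([0.0, 0.0, 10.0])

# The same rotation re-expressed as intrinsic X-Y'-Z'' angles, i.e. the
# "great roll, great pitch, small yaw" decomposition the commit introduces.
great_roll, great_pitch, small_yaw = r.as_euler('XYZ', degrees=True)

print(base_to_kite_in_ned)
print(great_roll, great_pitch, small_yaw)
```

Passing `degrees=True` to `as_euler` also avoids the manual `*180/np.pi` conversion seen in the record's code.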
62b6c26e44205e58c5d80a4db3bca9aec136c5ca
|
src/test/java/org/narwhal/core/MyFirstTest.java
|
src/test/java/org/narwhal/core/MyFirstTest.java
|
package org.narwhal.core;
/**
* Date: 28.02.13
* Time: 20:28
* Author: Miron Aseev
*/
public class MyFirstTest {
}
|
package org.narwhal.core;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Date: 28.02.13
* Time: 20:28
* Author: Miron Aseev
*/
@RunWith(JUnit4.class)
public class MyFirstTest {
@Test
public void myFirstTestMethod() {
}
@Test
public void mySecondTestMethod() {
}
}
|
Test stub has been implemented
|
Test stub has been implemented
|
Java
|
mit
|
maseev/narwhal
|
java
|
## Code Before:
package org.narwhal.core;
/**
* Date: 28.02.13
* Time: 20:28
* Author: Miron Aseev
*/
public class MyFirstTest {
}
## Instruction:
Test stub has been implemented
## Code After:
package org.narwhal.core;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Date: 28.02.13
* Time: 20:28
* Author: Miron Aseev
*/
@RunWith(JUnit4.class)
public class MyFirstTest {
@Test
public void myFirstTestMethod() {
}
@Test
public void mySecondTestMethod() {
}
}
|
...
package org.narwhal.core;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Date: 28.02.13
...
* Time: 20:28
* Author: Miron Aseev
*/
@RunWith(JUnit4.class)
public class MyFirstTest {
@Test
public void myFirstTestMethod() {
}
@Test
public void mySecondTestMethod() {
}
}
...
|
4035afc6fa7f47219a39ad66f902bb90c6e81aa1
|
pyopenapi/scanner/type_reducer.py
|
pyopenapi/scanner/type_reducer.py
|
from __future__ import absolute_import
from ..scan import Dispatcher
from ..errs import SchemaError
from ..spec.v3_0_0.objects import Operation
from ..utils import scope_compose
from ..consts import private
class TypeReduce(object):
""" Type Reducer, collect Operation & Model
spreaded in Resources put in a global accessible place.
"""
class Disp(Dispatcher): pass
def __init__(self, sep=private.SCOPE_SEPARATOR):
self.op = {}
self.__sep = sep
@Disp.register([Operation])
def _op(self, path, obj, _):
scope = obj.tags[0] if obj.tags and len(obj.tags) > 0 else None
name = obj.operationId if obj.operationId else None
# in swagger 2.0, both 'operationId' and 'tags' are optional.
# When 'operationId' is empty, it causes 'scope_compose' return something
# duplicated with other Operations with the same tag.
if not name:
return
new_scope = scope_compose(scope, name, sep=self.__sep)
if new_scope:
if new_scope in self.op.keys():
raise SchemaError('duplicated key found: ' + new_scope)
self.op[new_scope] = obj
|
from __future__ import absolute_import
from ..scan import Dispatcher
from ..errs import SchemaError
from ..spec.v3_0_0.objects import Operation as Op3
from ..spec.v2_0.objects import Operation as Op2
from ..utils import scope_compose
from ..consts import private
class TypeReduce(object):
""" Type Reducer, collect Operation & Model
spreaded in Resources put in a global accessible place.
"""
class Disp(Dispatcher): pass
def __init__(self, sep=private.SCOPE_SEPARATOR):
self.op = {}
self.__sep = sep
@Disp.register([Op3, Op2])
def _op(self, path, obj, _):
scope = obj.tags[0] if obj.tags and len(obj.tags) > 0 else None
name = obj.operationId if obj.operationId else None
# in swagger 2.0, both 'operationId' and 'tags' are optional.
# When 'operationId' is empty, it causes 'scope_compose' return something
# duplicated with other Operations with the same tag.
if not name:
return
new_scope = scope_compose(scope, name, sep=self.__sep)
if new_scope:
if new_scope in self.op.keys():
raise SchemaError('duplicated key found: ' + new_scope)
self.op[new_scope] = obj
|
Allow to reduce Operations in 2.0 and 3.0.0 to App.op
|
Allow to reduce Operations in 2.0 and 3.0.0 to App.op
|
Python
|
mit
|
mission-liao/pyopenapi
|
python
|
## Code Before:
from __future__ import absolute_import
from ..scan import Dispatcher
from ..errs import SchemaError
from ..spec.v3_0_0.objects import Operation
from ..utils import scope_compose
from ..consts import private
class TypeReduce(object):
""" Type Reducer, collect Operation & Model
spreaded in Resources put in a global accessible place.
"""
class Disp(Dispatcher): pass
def __init__(self, sep=private.SCOPE_SEPARATOR):
self.op = {}
self.__sep = sep
@Disp.register([Operation])
def _op(self, path, obj, _):
scope = obj.tags[0] if obj.tags and len(obj.tags) > 0 else None
name = obj.operationId if obj.operationId else None
# in swagger 2.0, both 'operationId' and 'tags' are optional.
# When 'operationId' is empty, it causes 'scope_compose' return something
# duplicated with other Operations with the same tag.
if not name:
return
new_scope = scope_compose(scope, name, sep=self.__sep)
if new_scope:
if new_scope in self.op.keys():
raise SchemaError('duplicated key found: ' + new_scope)
self.op[new_scope] = obj
## Instruction:
Allow to reduce Operations in 2.0 and 3.0.0 to App.op
## Code After:
from __future__ import absolute_import
from ..scan import Dispatcher
from ..errs import SchemaError
from ..spec.v3_0_0.objects import Operation as Op3
from ..spec.v2_0.objects import Operation as Op2
from ..utils import scope_compose
from ..consts import private
class TypeReduce(object):
""" Type Reducer, collect Operation & Model
spreaded in Resources put in a global accessible place.
"""
class Disp(Dispatcher): pass
def __init__(self, sep=private.SCOPE_SEPARATOR):
self.op = {}
self.__sep = sep
@Disp.register([Op3, Op2])
def _op(self, path, obj, _):
scope = obj.tags[0] if obj.tags and len(obj.tags) > 0 else None
name = obj.operationId if obj.operationId else None
# in swagger 2.0, both 'operationId' and 'tags' are optional.
# When 'operationId' is empty, it causes 'scope_compose' return something
# duplicated with other Operations with the same tag.
if not name:
return
new_scope = scope_compose(scope, name, sep=self.__sep)
if new_scope:
if new_scope in self.op.keys():
raise SchemaError('duplicated key found: ' + new_scope)
self.op[new_scope] = obj
|
# ... existing code ...
from __future__ import absolute_import
from ..scan import Dispatcher
from ..errs import SchemaError
from ..spec.v3_0_0.objects import Operation as Op3
from ..spec.v2_0.objects import Operation as Op2
from ..utils import scope_compose
from ..consts import private
# ... modified code ...
self.op = {}
self.__sep = sep
@Disp.register([Op3, Op2])
def _op(self, path, obj, _):
scope = obj.tags[0] if obj.tags and len(obj.tags) > 0 else None
name = obj.operationId if obj.operationId else None
# ... rest of the code ...
|
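The change above only widens the type list passed to `Disp.register`, so the same reducer method now fires for both the 2.0 and 3.0.0 `Operation` objects. The snippet below is a hypothetical, heavily reduced stand-in for that dispatch-by-type pattern — it is not pyopenapi's real `Dispatcher`, and the class bodies, separator, and sample operation ids are invented for illustration only.

```python
class Op2:  # stand-in for ..spec.v2_0.objects.Operation
    def __init__(self, operationId, tags):
        self.operationId, self.tags = operationId, tags

class Op3:  # stand-in for ..spec.v3_0_0.objects.Operation
    def __init__(self, operationId, tags):
        self.operationId, self.tags = operationId, tags

_routes = {}

def register(types):
    """Map each listed class to the decorated handler."""
    def deco(fn):
        for t in types:
            _routes[t] = fn
        return fn
    return deco

@register([Op2, Op3])
def reduce_operation(obj, ops, sep='!'):
    if not obj.operationId:
        return
    scope = obj.tags[0] if obj.tags else None
    key = (scope + sep if scope else '') + obj.operationId
    if key in ops:
        raise ValueError('duplicated key found: ' + key)
    ops[key] = obj

ops = {}
for op in (Op2('getPet', ['pet']), Op3('addPet', ['pet'])):
    _routes[type(op)](op, ops)  # one handler serves both spec versions

print(sorted(ops))  # ['pet!addPet', 'pet!getPet']
```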
424588f4cdad2dd063b15895198611703b187bec
|
pynpact/tests/steps/conftest.py
|
pynpact/tests/steps/conftest.py
|
import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
Make pynpact tests use GeventExecutor
|
Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.
|
Python
|
bsd-3-clause
|
NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact
|
python
|
## Code Before:
import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
## Instruction:
Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.
## Code After:
import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
...
import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
...
|
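For completeness, here is a hypothetical sketch of how the `NullExecutor` fixture above can back a test: because it only records callables and checks their declared dependencies, a test can assert on the enqueued task graph without executing anything. The class is re-inlined (slightly trimmed, dropping the undefined `randomid()` fallback) so the snippet runs on its own; the task ids and test name are invented.

```python
import pytest

class NullExecutor:
    """Records enqueued callables instead of executing them."""
    def __init__(self):
        self.tasks = {}

    def enqueue(self, callable, tid=None, after=None):
        if after is not None:
            for aid in after:
                assert aid in self.tasks, \
                    "The NullExecutor can't be after a task that doesn't exist yet"
        if tid not in self.tasks:
            self.tasks[tid] = callable
        return tid

@pytest.fixture
def null_executor():
    return NullExecutor()

def test_enqueue_tracks_dependencies(null_executor):
    first = null_executor.enqueue(lambda: "extract", tid="extract")
    null_executor.enqueue(lambda: "plot", tid="plot", after=[first])
    assert set(null_executor.tasks) == {"extract", "plot"}
```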